author     Bo Liu <boliu@google.com>    2014-05-01 10:37:37 -0700
committer  Bo Liu <boliu@google.com>    2014-05-01 10:37:37 -0700
commit     5c02ac1a9c1b504631c0a3d2b6e737b5d738bae1 (patch)
tree       5df224c921d52ab02739b38e2149652208b023e6 /tools
parent     56e9d6e218656f3d05620cc9877fd61482337fb1 (diff)
download   chromium_org-5c02ac1a9c1b504631c0a3d2b6e737b5d738bae1.tar.gz
Merge from Chromium at DEPS revision 267519
This commit was generated by merge_to_master.py.

Change-Id: I6002987b71e09742c68bad9c834ae800ff531f25
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/android/asan/libclang_rt.asan-arm-android.so  bin 1330928 -> 0 bytes
-rwxr-xr-x  tools/android/memdump/memreport.py  278
-rwxr-xr-x  tools/android/mempressure.py  10
-rwxr-xr-x  tools/bisect-perf-regression.py  319
-rw-r--r--  tools/bisect-perf-regression_test.py  31
-rw-r--r--  tools/bisect_utils.py  7
-rwxr-xr-x  tools/checkdeps/builddeps.py  287
-rwxr-xr-x  tools/checkdeps/checkdeps.py  29
-rw-r--r--  tools/checkdeps/cpp_checker.py  4
-rw-r--r--  tools/checkdeps/rules.py  6
-rw-r--r--  tools/clang/blink_gc_plugin/RecordInfo.cpp  23
-rwxr-xr-x  tools/clang/scripts/update.py  16
-rwxr-xr-x  tools/clang/scripts/update.sh  23
-rw-r--r--  tools/cr/cr/actions/ninja.py  9
-rw-r--r--  tools/cr/cr/base/android.py  1
-rw-r--r--  tools/cr/cr/base/platform.py  2
-rw-r--r--  tools/gn/escape.cc  3
-rw-r--r--  tools/gn/escape_unittest.cc  19
-rw-r--r--  tools/gn/filesystem_utils.cc  17
-rw-r--r--  tools/gn/filesystem_utils.h  4
-rw-r--r--  tools/gn/filesystem_utils_unittest.cc  17
-rw-r--r--  tools/gn/input_file_manager.cc  123
-rw-r--r--  tools/gn/ninja_action_target_writer_unittest.cc  12
-rw-r--r--  tools/gn/ninja_build_writer.cc  76
-rw-r--r--  tools/gn/ninja_build_writer.h  3
-rw-r--r--  tools/gn/parser.cc  11
-rw-r--r--  tools/gn/parser_unittest.cc  1
-rw-r--r--  tools/gn/secondary/sdch/BUILD.gn  7
-rw-r--r--  tools/gn/secondary/third_party/expat/BUILD.gn  37
-rw-r--r--  tools/gn/secondary/third_party/libevent/BUILD.gn  45
-rw-r--r--  tools/gn/secondary/third_party/libxml/BUILD.gn  190
-rw-r--r--  tools/gn/secondary/third_party/mach_override/BUILD.gn  37
-rw-r--r--  tools/gn/secondary/third_party/snappy/BUILD.gn  46
-rw-r--r--  tools/gn/secondary/third_party/wtl/BUILD.gn  14
-rw-r--r--  tools/gn/secondary/third_party/zlib/BUILD.gn  102
-rw-r--r--  tools/gn/secondary/tools/grit/grit_rule.gni  2
-rw-r--r--  tools/gn/secondary/v8/BUILD.gn  1326
-rw-r--r--  tools/gn/target.cc  7
-rw-r--r--  tools/gn/target.h  3
-rw-r--r--  tools/gn/target_unittest.cc  30
-rw-r--r--  tools/gritsettings/resource_ids  7
-rw-r--r--  tools/ipc_fuzzer/mutate/mutate.gyp  18
-rw-r--r--  tools/json_schema_compiler/BUILD.gn  13
-rw-r--r--  tools/json_schema_compiler/api_gen_util.target.darwin-arm64.mk  247
-rw-r--r--  tools/json_schema_compiler/api_gen_util.target.linux-arm64.mk  247
-rw-r--r--  tools/json_schema_compiler/cc_generator.py  9
-rw-r--r--  tools/json_schema_compiler/test/tabs.json  2
-rwxr-xr-x  tools/linux/dump-static-initializers.py  5
-rw-r--r--  tools/memory_inspector/PRESUBMIT.py  6
-rw-r--r--  tools/memory_inspector/memory_inspector/backends/android/android_backend.py  47
-rw-r--r--  tools/metrics/actions/actions.xml  5
-rw-r--r--  tools/metrics/histograms/histograms.xml  218
-rw-r--r--  tools/msan/blacklist.txt  4
-rw-r--r--  tools/perf/OWNERS  1
-rw-r--r--  tools/perf/benchmarks/OWNERS  3
-rw-r--r--  tools/perf/benchmarks/browsermark.py  16
-rw-r--r--  tools/perf/benchmarks/canvasmark.py  17
-rw-r--r--  tools/perf/benchmarks/dom_perf.py  32
-rw-r--r--  tools/perf/benchmarks/dromaeo.py  9
-rw-r--r--  tools/perf/benchmarks/html5gaming.py  16
-rw-r--r--  tools/perf/benchmarks/indexeddb_perf.py  8
-rw-r--r--  tools/perf/benchmarks/jsgamebench.py  12
-rw-r--r--  tools/perf/benchmarks/kraken.py  12
-rw-r--r--  tools/perf/benchmarks/maps.py  38
-rw-r--r--  tools/perf/benchmarks/octane.py  16
-rw-r--r--  tools/perf/benchmarks/page_cycler.py  1
-rw-r--r--  tools/perf/benchmarks/peacekeeper.py  25
-rw-r--r--  tools/perf/benchmarks/pica.py  2
-rw-r--r--  tools/perf/benchmarks/polymer_load.py  13
-rw-r--r--  tools/perf/benchmarks/rasterize_and_record_micro.py  30
-rw-r--r--  tools/perf/benchmarks/repaint.py  6
-rw-r--r--  tools/perf/benchmarks/robohornet_pro.py  17
-rw-r--r--  tools/perf/benchmarks/scirra.py  20
-rw-r--r--  tools/perf/benchmarks/silk_flags.py  8
-rw-r--r--  tools/perf/benchmarks/smoothness.py  61
-rw-r--r--  tools/perf/benchmarks/spaceport.py  6
-rw-r--r--  tools/perf/benchmarks/sunspider.py  12
-rw-r--r--  tools/perf/benchmarks/thread_times.py  9
-rw-r--r--  tools/perf/measurements/OWNERS  3
-rw-r--r--  tools/perf/measurements/polymer_load.py  37
-rw-r--r--  tools/perf/measurements/rasterize_and_record_micro.py  2
-rw-r--r--  tools/perf/measurements/rasterize_and_record_micro_unittest.py  2
-rw-r--r--  tools/perf/measurements/repaint_unittest.py  15
-rw-r--r--  tools/perf/measurements/smooth_gesture_util.py  3
-rw-r--r--  tools/perf/measurements/smooth_gesture_util_unittest.py  25
-rw-r--r--  tools/perf/measurements/smoothness.py  1
-rw-r--r--  tools/perf/measurements/smoothness_unittest.py  4
-rw-r--r--  tools/perf/measurements/timeline_based_measurement_unittest.py  13
-rw-r--r--  tools/perf/metrics/OWNERS  3
-rw-r--r--  tools/perf/metrics/rendering_stats.py  103
-rw-r--r--  tools/perf/metrics/rendering_stats_unittest.py  109
-rw-r--r--  tools/perf/metrics/unittest_data/OWNERS  3
-rw-r--r--  tools/perf/page_sets/chrome_proxy/OWNERS  3
-rw-r--r--  tools/perf/page_sets/data/OWNERS  3
-rw-r--r--  tools/perf/page_sets/data/polymer.json  8
-rw-r--r--  tools/perf/page_sets/data/polymer_000.wpr.sha1  1
-rw-r--r--  tools/perf/page_sets/key_silk_cases.py  24
-rw-r--r--  tools/perf/page_sets/mse_cases/startup_test.js  3
-rw-r--r--  tools/perf/page_sets/pica.py  17
-rw-r--r--  tools/perf/page_sets/polymer.py  84
-rw-r--r--  tools/perf/page_sets/tough_scheduling_cases.py  6
-rw-r--r--  tools/perf_expectations/perf_expectations.json  2
-rwxr-xr-x  tools/resources/list_resources_removed_by_repack.py  99
-rwxr-xr-x  tools/run-bisect-perf-regression.py  11
-rw-r--r--  tools/telemetry/bootstrap_deps  2
-rw-r--r--  tools/telemetry/telemetry/core/backends/adb_commands.py  25
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/android_browser_backend.py  42
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/android_browser_finder.py  3
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/android_browser_finder_unittest.py  7
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/chrome_browser_backend.py  7
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/chrome_browser_options.py  3
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/cros_browser_backend.py  143
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/cros_test_case.py  76
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/cros_unittest.py  94
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/desktop_browser_backend.py  106
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/inspector_backend.py  5
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py  7
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/inspector_runtime_unittest.py  4
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/misc_web_contents_backend.py  7
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/oobe.py  113
-rw-r--r--  tools/telemetry/telemetry/core/backends/chrome/websocket_unittest.py  34
-rw-r--r--  tools/telemetry/telemetry/core/forwarders/android_forwarder.py  46
-rw-r--r--  tools/telemetry/telemetry/core/memory_cache_http_server.py  3
-rw-r--r--  tools/telemetry/telemetry/core/platform/android_platform_backend.py  49
-rw-r--r--  tools/telemetry/telemetry/core/platform/android_platform_backend_unittest.py  11
-rw-r--r--  tools/telemetry/telemetry/core/platform/linux_platform_backend.py  17
-rw-r--r--  tools/telemetry/telemetry/core/platform/power_monitor/android_ds2784_power_monitor.py  19
-rw-r--r--  tools/telemetry/telemetry/core/platform/power_monitor/android_dumpsys_power_monitor.py  15
-rw-r--r--  tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor.py  6
-rw-r--r--  tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor_unittest.py  4
-rw-r--r--  tools/telemetry/telemetry/core/platform/power_monitor/powermetrics_power_monitor_unittest.py  6
-rw-r--r--  tools/telemetry/telemetry/core/platform/proc_supporting_platform_backend_unittest.py  30
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/android_memreport_profiler.py  40
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/android_prebuilt_profiler_helper.py  7
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/android_screen_recorder_profiler.py  2
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/android_traceview_profiler.py  7
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/java_heap_profiler.py  7
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/netlog_profiler.py  6
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/perf_profiler.py  31
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/perf_profiler_unittest.py  3
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/tcmalloc_heap_profiler.py  6
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/tcpdump_profiler.py  7
-rw-r--r--  tools/telemetry/telemetry/core/platform/profiler/v8_profiler.py  6
-rw-r--r--  tools/telemetry/telemetry/core/tab_unittest.py  7
-rw-r--r--  tools/telemetry/telemetry/core/webpagereplay.py  37
-rw-r--r--  tools/telemetry/telemetry/page/actions/gesture_action.py  5
-rw-r--r--  tools/telemetry/telemetry/page/actions/gesture_action_unittest.py  34
-rw-r--r--  tools/telemetry/telemetry/page/actions/loop_unittest.py  6
-rw-r--r--  tools/telemetry/telemetry/page/actions/play_unittest.py  6
-rw-r--r--  tools/telemetry/telemetry/page/actions/wait_unittest.py  52
-rw-r--r--  tools/telemetry/telemetry/page/block_page_measurement_results_unittest.py  13
-rw-r--r--  tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py  14
-rw-r--r--  tools/telemetry/telemetry/page/csv_page_measurement_results_unittest.py  11
-rw-r--r--  tools/telemetry/telemetry/page/html_page_measurement_results.py  9
-rw-r--r--  tools/telemetry/telemetry/page/html_page_measurement_results_unittest.py  14
-rw-r--r--  tools/telemetry/telemetry/page/page.py  9
-rw-r--r--  tools/telemetry/telemetry/page/page_measurement_results_unittest.py  14
-rw-r--r--  tools/telemetry/telemetry/page/page_measurement_unittest.py  2
-rw-r--r--  tools/telemetry/telemetry/page/page_measurement_unittest_base.py  26
-rw-r--r--  tools/telemetry/telemetry/page/page_runner.py  4
-rw-r--r--  tools/telemetry/telemetry/page/page_runner_unittest.py  44
-rw-r--r--  tools/telemetry/telemetry/page/page_set.py  43
-rw-r--r--  tools/telemetry/telemetry/page/page_set_unittest.py  39
-rw-r--r--  tools/telemetry/telemetry/page/page_test_results_unittest.py  13
-rw-r--r--  tools/telemetry/telemetry/page/page_unittest.py  109
-rw-r--r--  tools/telemetry/telemetry/unittest/simple_mock.py  11
-rw-r--r--  tools/telemetry/telemetry/unittest/system_stub.py  10
-rw-r--r--  tools/telemetry/telemetry/unittest/tab_test_case.py  5
-rw-r--r--  tools/telemetry/telemetry/value/histogram_unittest.py  13
-rw-r--r--  tools/telemetry/telemetry/value/list_of_scalar_values_unittest.py  13
-rw-r--r--  tools/telemetry/telemetry/value/list_of_string_values.py  91
-rw-r--r--  tools/telemetry/telemetry/value/list_of_string_values_unittest.py  78
-rw-r--r--  tools/telemetry/telemetry/value/merge_values_unittest.py  13
-rw-r--r--  tools/telemetry/telemetry/value/scalar_unittest.py  13
-rw-r--r--  tools/telemetry/telemetry/value/string.py  68
-rw-r--r--  tools/telemetry/telemetry/value/string_unittest.py  61
-rw-r--r--  tools/telemetry/telemetry/value/value_unittest_.py  13
-rw-r--r--  tools/telemetry/unittest_data/data/archive_blank.json  8
-rw-r--r--  tools/telemetry/unittest_data/data/archive_blank_000.wpr.sha1  1
-rw-r--r--  tools/valgrind/browser_wrapper_win.py  2
-rwxr-xr-x  tools/valgrind/chrome_tests.py  20
-rw-r--r--  tools/valgrind/drmemory/suppressions.txt  18
-rw-r--r--  tools/valgrind/drmemory/suppressions_full.txt  48
-rwxr-xr-x  tools/valgrind/drmemory_analyze.py  8
-rw-r--r--  tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt  14
-rw-r--r--  tools/valgrind/gtest_exclude/content_browsertests.gtest-tsan.txt  4
-rw-r--r--  tools/valgrind/memcheck/suppressions.txt  53
-rw-r--r--  tools/valgrind/tsan_v2/ignores.txt  4
-rw-r--r--  tools/valgrind/tsan_v2/suppressions.txt  14
189 files changed, 4272 insertions, 2807 deletions
diff --git a/tools/android/asan/libclang_rt.asan-arm-android.so b/tools/android/asan/libclang_rt.asan-arm-android.so
deleted file mode 100755
index d679070dbe..0000000000
--- a/tools/android/asan/libclang_rt.asan-arm-android.so
+++ /dev/null
Binary files differ
diff --git a/tools/android/memdump/memreport.py b/tools/android/memdump/memreport.py
deleted file mode 100755
index 1782d6a5c4..0000000000
--- a/tools/android/memdump/memreport.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import json
-import optparse
-import os
-import re
-import threading
-import time
-import sys
-
-from sets import Set
-from string import Template
-
-sys.path.append(os.path.join(sys.path[0], os.pardir, os.pardir, os.pardir,
- 'build','android'))
-from pylib import android_commands
-from pylib import constants
-
-
-_ENTRIES = [
- ('Total', '.* r... '),
- ('Read-only', '.* r--. '),
- ('Read-write', '.* rw.. '),
- ('Executable', '.* ..x. '),
- ('Anonymous total', '.* ""'),
- ('Anonymous read-write', '.* rw.. .* ""'),
- ('Anonymous executable (JIT\'ed code)', '.* ..x. .* ""'),
- ('File total', '.* .... .* "/.*"'),
- ('File read-write', '.* rw.. .* "/.*"'),
- ('File executable', '.* ..x. .* "/.*"'),
- ('/dev files', '.* r... .* "/dev/.*"'),
- ('Dalvik', '.* rw.. .* "/.*dalvik.*"'),
- ('Dalvik heap', '.* rw.. .* "/.*dalvik-heap.*"'),
- ('Native heap (malloc)', '.* r... .* ".*malloc.*"'),
- ('Ashmem', '.* rw.. .* "/dev/ashmem '),
- ('Native library total', '.* r... .* "/data/app-lib/'),
- ('Native library read-only', '.* r--. .* "/data/app-lib/'),
- ('Native library read-write', '.* rw-. .* "/data/app-lib/'),
- ('Native library executable', '.* r.x. .* "/data/app-lib/'),
-]
-
-
-def _CollectMemoryStats(memdump, region_filters):
- processes = []
- mem_usage_for_regions = None
- regexps = {}
- for region_filter in region_filters:
- regexps[region_filter] = re.compile(region_filter)
- for line in memdump:
- if 'PID=' in line:
- mem_usage_for_regions = {}
- processes.append(mem_usage_for_regions)
- continue
- matched_regions = Set([])
- for region_filter in region_filters:
- if regexps[region_filter].match(line.rstrip('\r\n')):
- matched_regions.add(region_filter)
- if not region_filter in mem_usage_for_regions:
- mem_usage_for_regions[region_filter] = {
- 'private_unevictable': 0,
- 'private': 0,
- 'shared_app': 0.0,
- 'shared_app_unevictable': 0.0,
- 'shared_other_unevictable': 0,
- 'shared_other': 0,
- }
- for matched_region in matched_regions:
- mem_usage = mem_usage_for_regions[matched_region]
- for key in mem_usage:
- for token in line.split(' '):
- if (key + '=') in token:
- field = token.split('=')[1]
- if key != 'shared_app':
- mem_usage[key] += int(field)
- else: # shared_app=[\d:\d,\d:\d...]
- array = field[1:-1].split(',')
- for i in xrange(len(array)):
- shared_app, shared_app_unevictable = array[i].split(':')
- mem_usage['shared_app'] += float(shared_app) / (i + 2)
- mem_usage['shared_app_unevictable'] += \
- float(shared_app_unevictable) / (i + 2)
- break
- return processes
-
-
-def _ConvertMemoryField(field):
- return str(field / (1024.0 * 1024))
-
-
-def _DumpCSV(processes_stats):
- total_map = {}
- i = 0
- for process in processes_stats:
- i += 1
- print (',Process ' + str(i) + ',private,private_unevictable,shared_app,' +
- 'shared_app_unevictable,shared_other,shared_other_unevictable,')
- for (k, v) in _ENTRIES:
- if not v in process:
- print ',' + k + ',0,0,0,0,0,'
- continue
- if not v in total_map:
- total_map[v] = {'resident':0, 'unevictable':0}
- total_map[v]['resident'] += (process[v]['private'] +
- process[v]['shared_app'])
- total_map[v]['unevictable'] += process[v]['private_unevictable'] + \
- process[v]['shared_app_unevictable']
- print (
- ',' + k + ',' +
- _ConvertMemoryField(process[v]['private']) + ',' +
- _ConvertMemoryField(process[v]['private_unevictable']) + ',' +
- _ConvertMemoryField(process[v]['shared_app']) + ',' +
- _ConvertMemoryField(process[v]['shared_app_unevictable']) + ',' +
- _ConvertMemoryField(process[v]['shared_other']) + ',' +
- _ConvertMemoryField(process[v]['shared_other_unevictable']) + ','
- )
- print ''
-
- for (k, v) in _ENTRIES:
- if not v in total_map:
- print ',' + k + ',0,0,'
- continue
- print (',' + k + ',' + _ConvertMemoryField(total_map[v]['resident']) + ',' +
- _ConvertMemoryField(total_map[v]['unevictable']) + ',')
- print ''
-
-
-def _RunManualGraph(package_name, interval):
- _AREA_TYPES = ('private', 'private_unevictable', 'shared_app',
- 'shared_app_unevictable', 'shared_other',
- 'shared_other_unevictable')
- all_pids = {}
- legends = ['Seconds'] + [entry + '_' + area
- for entry, _ in _ENTRIES
- for area in _AREA_TYPES]
- should_quit = threading.Event()
-
- def _GenerateGraph():
- _HTML_TEMPLATE = """
-<html>
- <head>
- <script type='text/javascript' src='https://www.google.com/jsapi'></script>
- <script type='text/javascript'>
- google.load('visualization', '1', {packages:['corechart', 'table']});
- google.setOnLoadCallback(createPidSelector);
- var pids = $JSON_PIDS;
- var pids_info = $JSON_PIDS_INFO;
- function drawVisualization(pid) {
- var data = google.visualization.arrayToDataTable(
- pids_info[pid]
- );
-
- var charOptions = {
- title: 'Memory Report (KB) for ' + pid,
- vAxis: {title: 'Time', titleTextStyle: {color: 'red'}},
- isStacked : true
- };
-
- var chart = new google.visualization.BarChart(
- document.getElementById('chart_div'));
- chart.draw(data, charOptions);
-
- var table = new google.visualization.Table(
- document.getElementById('table_div'));
- table.draw(data);
- }
-
- function createPidSelector() {
- var pid_selector = document.getElementById('pid_selector');
- for (pid in pids) {
- var option = document.createElement('option');
- option.text = option.value = pids[pid];
- pid_selector.appendChild(option);
- }
- pid_selector.addEventListener('change',
- function() {
- drawVisualization(this.selectedOptions[0].value);
- }
- );
- drawVisualization(pids[0]);
- }
- </script>
- </head>
- <body>
- PIDS: <select id='pid_selector'></select>
- <div id='chart_div' style="width: 1024px; height: 800px;"></div>
- <div id='table_div' style="width: 1024px; height: 640px;"></div>
- </body>
-</html>
-"""
- pids = sorted(all_pids.keys())
- pids_info = dict(zip(pids,
- [ [legends] +
- all_pids[p] for p in pids
- ]))
- print Template(_HTML_TEMPLATE).safe_substitute({
- 'JSON_PIDS': json.dumps(pids),
- 'JSON_PIDS_INFO': json.dumps(pids_info)
- })
-
-
-
- def _CollectStats(count):
- adb = android_commands.AndroidCommands()
- pid_list = adb.ExtractPid(package_name)
- memdump = adb.RunShellCommand('/data/local/tmp/memdump ' +
- ' '.join(pid_list))
- process_stats = _CollectMemoryStats(memdump,
- [value for (key, value) in _ENTRIES])
- for (pid, process) in zip(pid_list, process_stats):
- first_pid_entry = True
- for (k, v) in _ENTRIES:
- if v not in process:
- continue
- for area_type in _AREA_TYPES:
- legend = k + '_' + area_type
- if pid not in all_pids:
- all_pids[pid] = []
- if first_pid_entry:
- all_pids[pid].append(['%ds' % (count * interval)] +
- [0] * (len(legends) - 1))
- first_pid_entry = False
- mem_kb = process[v][area_type] / 1024
- all_pids[pid][-1][legends.index(legend)] = mem_kb
-
- def _Loop():
- count = 0
- while not should_quit.is_set():
- print >>sys.stderr, 'Collecting ', count
- _CollectStats(count)
- count += 1
- should_quit.wait(interval)
-
- t = threading.Thread(target=_Loop)
-
-
- print >>sys.stderr, 'Press enter or CTRL+C to stop'
- t.start()
- try:
- _ = raw_input()
- except KeyboardInterrupt:
- pass
- finally:
- should_quit.set()
-
- t.join()
-
- _GenerateGraph()
-
-
-def main(argv):
- parser = optparse.OptionParser(usage='Usage: %prog [options]',
- description=__doc__)
- parser.add_option('-m',
- '--manual-graph',
- action='store_true',
- help='Manually collect data and generate a graph.')
- parser.add_option('-p',
- '--package',
- default=constants.PACKAGE_INFO['chrome'].package,
- help='Package name to collect.')
- parser.add_option('-i',
- '--interval',
- default=5,
- type='int',
- help='Interval in seconds for manual collections.')
- options, args = parser.parse_args(argv)
- if options.manual_graph:
- return _RunManualGraph(options.package, options.interval)
- _DumpCSV(_CollectMemoryStats(sys.stdin, [value for (key, value) in _ENTRIES]))
-
-
-if __name__ == '__main__':
- main(sys.argv)
diff --git a/tools/android/mempressure.py b/tools/android/mempressure.py
index 04486ecbb8..82fe2206df 100755
--- a/tools/android/mempressure.py
+++ b/tools/android/mempressure.py
@@ -14,9 +14,9 @@ BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__),
'build',
'android')
sys.path.append(BUILD_ANDROID_DIR)
-from pylib import android_commands
from pylib import constants
from pylib import flag_changer
+from pylib.device import device_utils
# Browser Constants
DEFAULT_BROWSER = 'chrome'
@@ -84,14 +84,14 @@ def main(argv):
package = package_info.package
activity = package_info.activity
- adb = android_commands.AndroidCommands(device=None)
+ device = device_utils.DeviceUtils(None)
- adb.EnableAdbRoot()
- flags = flag_changer.FlagChanger(adb, package_info.cmdline_file)
+ device.old_interface.EnableAdbRoot()
+ flags = flag_changer.FlagChanger(device, package_info.cmdline_file)
if ENABLE_TEST_INTENTS_FLAG not in flags.Get():
flags.AddFlags([ENABLE_TEST_INTENTS_FLAG])
- adb.StartActivity(package, activity, action=action)
+ device.old_interface.StartActivity(package, activity, action=action)
if __name__ == '__main__':
sys.exit(main(sys.argv))
diff --git a/tools/bisect-perf-regression.py b/tools/bisect-perf-regression.py
index 6b98b4b40d..f05b6e6e91 100755
--- a/tools/bisect-perf-regression.py
+++ b/tools/bisect-perf-regression.py
@@ -76,7 +76,7 @@ DEPOT_DEPS_NAME = {
"depends" : None,
"from" : ['cros', 'android-chrome'],
'viewvc': 'http://src.chromium.org/viewvc/chrome?view=revision&revision=',
- 'deps_var': None
+ 'deps_var': 'chromium_rev'
},
'webkit' : {
"src" : "src/third_party/WebKit",
@@ -159,9 +159,9 @@ BUILD_RESULT_SKIPPED = 2
# Maximum time in seconds to wait after posting build request to tryserver.
# TODO: Change these values based on the actual time taken by buildbots on
# the tryserver.
-MAX_MAC_BUILD_TIME = 7200
-MAX_WIN_BUILD_TIME = 7200
-MAX_LINUX_BUILD_TIME = 7200
+MAX_MAC_BUILD_TIME = 14400
+MAX_WIN_BUILD_TIME = 14400
+MAX_LINUX_BUILD_TIME = 14400
# Patch template to add a new file, DEPS.sha under src folder.
# This file contains SHA1 value of the DEPS changes made while bisecting
@@ -177,6 +177,13 @@ new file mode 100644
+%(deps_sha)s
"""
+# The possible values of the --bisect_mode flag, which determines what to
+# use when classifying a revision as "good" or "bad".
+BISECT_MODE_MEAN = 'mean'
+BISECT_MODE_STD_DEV = 'std_dev'
+BISECT_MODE_RETURN_CODE = 'return_code'
+
+
def _AddAdditionalDepotInfo(depot_info):
"""Adds additional depot info to the global depot variables."""
global DEPOT_DEPS_NAME
@@ -688,13 +695,19 @@ def BuildWithVisualStudio(targets):
def WriteStringToFile(text, file_name):
- with open(file_name, "w") as f:
- f.write(text)
+ try:
+ with open(file_name, "w") as f:
+ f.write(text)
+ except IOError as e:
+ raise RuntimeError('Error writing to file [%s]' % file_name )
def ReadStringFromFile(file_name):
- with open(file_name) as f:
- return f.read()
+ try:
+ with open(file_name) as f:
+ return f.read()
+ except IOError as e:
+ raise RuntimeError('Error reading file [%s]' % file_name )
def ChangeBackslashToSlashInPatch(diff_text):
@@ -740,8 +753,6 @@ class Builder(object):
if not bisect_utils.SetupPlatformBuildEnvironment(opts):
raise RuntimeError('Failed to set platform environment.')
- bisect_utils.RunGClient(['runhooks'])
-
@staticmethod
def FromOpts(opts):
builder = None
@@ -1367,23 +1378,47 @@ class BisectPerformanceMetrics(object):
return bleeding_edge_revision
- def Get3rdPartyRevisionsFromCurrentRevision(self, depot, revision):
- """Parses the DEPS file to determine WebKit/v8/etc... versions.
+ def _ParseRevisionsFromDEPSFileManually(self, deps_file_contents):
+ """Manually parses the vars section of the DEPS file to determine
+ chromium/blink/etc... revisions.
Returns:
A dict in the format {depot:revision} if successful, otherwise None.
"""
- cwd = os.getcwd()
- self.ChangeToDepotWorkingDirectory(depot)
+ # We'll parse the "vars" section of the DEPS file.
+ rxp = re.compile('vars = {(?P<vars_body>[^}]+)', re.MULTILINE)
+ re_results = rxp.search(deps_file_contents)
+ locals = {}
- results = {}
+ if not re_results:
+ return None
- if depot == 'chromium' or depot == 'android-chrome':
+ # We should be left with a series of entries in the vars component of
+ # the DEPS file with the following format:
+ # 'depot_name': 'revision',
+ vars_body = re_results.group('vars_body')
+ rxp = re.compile("'(?P<depot_body>[\w_-]+)':[\s]+'(?P<rev_body>[\w@]+)'",
+ re.MULTILINE)
+ re_results = rxp.findall(vars_body)
+
+ return dict(re_results)
+
+ def _ParseRevisionsFromDEPSFile(self, depot):
+ """Parses the local DEPS file to determine blink/skia/v8 revisions which may
+ be needed if the bisect recurses into those depots later.
+
+ Args:
+ depot: Depot being bisected.
+
+ Returns:
+ A dict in the format {depot:revision} if successful, otherwise None.
+ """
+ try:
locals = {'Var': lambda _: locals["vars"][_],
'From': lambda *args: None}
execfile(bisect_utils.FILE_DEPS_GIT, {}, locals)
-
- os.chdir(cwd)
+ locals = locals['deps']
+ results = {}
rxp = re.compile(".git@(?P<revision>[a-fA-F0-9]+)")
@@ -1394,28 +1429,61 @@ class BisectPerformanceMetrics(object):
if (DEPOT_DEPS_NAME[d]['recurse'] and
depot in DEPOT_DEPS_NAME[d]['from']):
- if (locals['deps'].has_key(DEPOT_DEPS_NAME[d]['src']) or
- locals['deps'].has_key(DEPOT_DEPS_NAME[d]['src_old'])):
- if locals['deps'].has_key(DEPOT_DEPS_NAME[d]['src']):
- re_results = rxp.search(locals['deps'][DEPOT_DEPS_NAME[d]['src']])
+ if (locals.has_key(DEPOT_DEPS_NAME[d]['src']) or
+ locals.has_key(DEPOT_DEPS_NAME[d]['src_old'])):
+ if locals.has_key(DEPOT_DEPS_NAME[d]['src']):
+ re_results = rxp.search(locals[DEPOT_DEPS_NAME[d]['src']])
self.depot_cwd[d] = \
os.path.join(self.src_cwd, DEPOT_DEPS_NAME[d]['src'][4:])
- elif locals['deps'].has_key(DEPOT_DEPS_NAME[d]['src_old']):
+ elif (DEPOT_DEPS_NAME[d].has_key('src_old') and
+ locals.has_key(DEPOT_DEPS_NAME[d]['src_old'])):
re_results = \
- rxp.search(locals['deps'][DEPOT_DEPS_NAME[d]['src_old']])
+ rxp.search(locals[DEPOT_DEPS_NAME[d]['src_old']])
self.depot_cwd[d] = \
os.path.join(self.src_cwd, DEPOT_DEPS_NAME[d]['src_old'][4:])
if re_results:
results[d] = re_results.group('revision')
else:
- print 'Couldn\'t parse revision for %s.' % d
- print
- return None
+ warning_text = ('Couldn\'t parse revision for %s while bisecting '
+ '%s' % (d, depot))
+          if warning_text not in self.warnings:
+            self.warnings.append(warning_text)
else:
print 'Couldn\'t find %s while parsing .DEPS.git.' % d
print
return None
+ return results
+ except ImportError:
+ deps_file_contents = ReadStringFromFile(bisect_utils.FILE_DEPS_GIT)
+ parse_results = self._ParseRevisionsFromDEPSFileManually(
+ deps_file_contents)
+ results = {}
+ for depot_name, depot_revision in parse_results.iteritems():
+ depot_revision = depot_revision.strip('@')
+ print depot_name, depot_revision
+ for current_name, current_data in DEPOT_DEPS_NAME.iteritems():
+ if (current_data.has_key('deps_var') and
+ current_data['deps_var'] == depot_name):
+ src_name = current_name
+ results[src_name] = depot_revision
+ break
+ return results
+
+ def Get3rdPartyRevisionsFromCurrentRevision(self, depot, revision):
+ """Parses the DEPS file to determine WebKit/v8/etc... versions.
+
+ Returns:
+ A dict in the format {depot:revision} if successful, otherwise None.
+ """
+ cwd = os.getcwd()
+ self.ChangeToDepotWorkingDirectory(depot)
+
+ results = {}
+
+ if depot == 'chromium' or depot == 'android-chrome':
+ results = self._ParseRevisionsFromDEPSFile(depot)
+ os.chdir(cwd)
elif depot == 'cros':
cmd = [CROS_SDK_PATH, '--', 'portageq-%s' % self.opts.cros_board,
'best_visible', '/build/%s' % self.opts.cros_board, 'ebuild',
@@ -1968,6 +2036,15 @@ class BisectPerformanceMetrics(object):
return False
return True
+ def _IsBisectModeUsingMetric(self):
+ return self.opts.bisect_mode in [BISECT_MODE_MEAN, BISECT_MODE_STD_DEV]
+
+ def _IsBisectModeReturnCode(self):
+ return self.opts.bisect_mode in [BISECT_MODE_RETURN_CODE]
+
+ def _IsBisectModeStandardDeviation(self):
+ return self.opts.bisect_mode in [BISECT_MODE_STD_DEV]
+
def RunPerformanceTestAndParseResults(
self, command_to_run, metric, reset_on_first_run=False,
upload_on_last_run=False, results_label=None):
@@ -2022,15 +2099,15 @@ class BisectPerformanceMetrics(object):
output_of_all_runs = ''
for i in xrange(self.opts.repeat_test_count):
# Can ignore the return code since if the tests fail, it won't return 0.
+ current_args = copy.copy(args)
+ if is_telemetry:
+ if i == 0 and reset_on_first_run:
+ current_args.append('--reset-results')
+ elif i == self.opts.repeat_test_count - 1 and upload_on_last_run:
+ current_args.append('--upload-results')
+ if results_label:
+ current_args.append('--results-label=%s' % results_label)
try:
- current_args = copy.copy(args)
- if is_telemetry:
- if i == 0 and reset_on_first_run:
- current_args.append('--reset-results')
- elif i == self.opts.repeat_test_count - 1 and upload_on_last_run:
- current_args.append('--upload-results')
- if results_label:
- current_args.append('--results-label=%s' % results_label)
(output, return_code) = RunProcessAndRetrieveOutput(current_args,
cwd=self.src_cwd)
except OSError, e:
@@ -2050,11 +2127,17 @@ class BisectPerformanceMetrics(object):
if self.opts.output_buildbot_annotations:
print output
- metric_values += self.ParseMetricValuesFromOutput(metric, output)
+ if self._IsBisectModeUsingMetric():
+ metric_values += self.ParseMetricValuesFromOutput(metric, output)
+ # If we're bisecting on a metric (ie, changes in the mean or
+ # standard deviation) and no metric values are produced, bail out.
+ if not metric_values:
+ break
+ elif self._IsBisectModeReturnCode():
+ metric_values.append(return_code)
elapsed_minutes = (time.time() - start_time) / 60.0
-
- if elapsed_minutes >= self.opts.max_time_minutes or not metric_values:
+ if elapsed_minutes >= self.opts.max_time_minutes:
break
if len(metric_values) == 0:
@@ -2063,22 +2146,43 @@ class BisectPerformanceMetrics(object):
# that were found in the output here.
return (err_text, failure_code, output_of_all_runs)
- # Need to get the average value if there were multiple values.
- truncated_mean = CalculateTruncatedMean(metric_values,
- self.opts.truncate_percent)
- standard_err = CalculateStandardError(metric_values)
- standard_dev = CalculateStandardDeviation(metric_values)
-
- values = {
- 'mean': truncated_mean,
- 'std_err': standard_err,
- 'std_dev': standard_dev,
- 'values': metric_values,
- }
-
- print 'Results of performance test: %12f %12f' % (
- truncated_mean, standard_err)
- print
+ # If we're bisecting on return codes, we're really just looking for zero vs
+ # non-zero.
+ if self._IsBisectModeReturnCode():
+ # If any of the return codes is non-zero, output 1.
+ overall_return_code = 0 if (
+ all(current_value == 0 for current_value in metric_values)) else 1
+
+ values = {
+ 'mean': overall_return_code,
+ 'std_err': 0.0,
+ 'std_dev': 0.0,
+ 'values': metric_values,
+ }
+
+ print 'Results of performance test: Command returned with %d' % (
+ overall_return_code)
+ print
+ else:
+ # Need to get the average value if there were multiple values.
+ truncated_mean = CalculateTruncatedMean(metric_values,
+ self.opts.truncate_percent)
+ standard_err = CalculateStandardError(metric_values)
+ standard_dev = CalculateStandardDeviation(metric_values)
+
+ if self._IsBisectModeStandardDeviation():
+ metric_values = [standard_dev]
+
+ values = {
+ 'mean': truncated_mean,
+ 'std_err': standard_err,
+ 'std_dev': standard_dev,
+ 'values': metric_values,
+ }
+
+ print 'Results of performance test: %12f %12f' % (
+ truncated_mean, standard_err)
+ print
return (values, success_code, output_of_all_runs)
def FindAllRevisionsToSync(self, revision, depot):
@@ -2339,7 +2443,7 @@ class BisectPerformanceMetrics(object):
return ('Failed to sync revision: [%s]' % (str(revision, )),
BUILD_RESULT_FAIL)
- def CheckIfRunPassed(self, current_value, known_good_value, known_bad_value):
+ def _CheckIfRunPassed(self, current_value, known_good_value, known_bad_value):
"""Given known good and bad values, decide if the current_value passed
or failed.
@@ -2352,8 +2456,14 @@ class BisectPerformanceMetrics(object):
True if the current_value is closer to the known_good_value than the
known_bad_value.
"""
- dist_to_good_value = abs(current_value['mean'] - known_good_value['mean'])
- dist_to_bad_value = abs(current_value['mean'] - known_bad_value['mean'])
+ if self.opts.bisect_mode == BISECT_MODE_STD_DEV:
+ dist_to_good_value = abs(current_value['std_dev'] -
+ known_good_value['std_dev'])
+ dist_to_bad_value = abs(current_value['std_dev'] -
+ known_bad_value['std_dev'])
+ else:
+ dist_to_good_value = abs(current_value['mean'] - known_good_value['mean'])
+ dist_to_bad_value = abs(current_value['mean'] - known_bad_value['mean'])
return dist_to_good_value < dist_to_bad_value
@@ -2909,9 +3019,9 @@ class BisectPerformanceMetrics(object):
next_revision_data['perf_time'] = run_results[3]
next_revision_data['build_time'] = run_results[4]
- passed_regression = self.CheckIfRunPassed(run_results[0],
- known_good_value,
- known_bad_value)
+ passed_regression = self._CheckIfRunPassed(run_results[0],
+ known_good_value,
+ known_bad_value)
next_revision_data['passed'] = passed_regression
next_revision_data['value'] = run_results[0]
@@ -2966,17 +3076,23 @@ class BisectPerformanceMetrics(object):
print " __o_\___ Aw Snap! We hit a speed bump!"
print "=-O----O-'__.~.___________________________________"
print
- print 'Bisect reproduced a %.02f%% (+-%.02f%%) change in the %s metric.' % (
- results_dict['regression_size'], results_dict['regression_std_err'],
- '/'.join(self.opts.metric))
+ if self._IsBisectModeReturnCode():
+ print ('Bisect reproduced a change in return codes while running the '
+ 'performance test.')
+ else:
+ print ('Bisect reproduced a %.02f%% (+-%.02f%%) change in the '
+ '%s metric.' % (results_dict['regression_size'],
+ results_dict['regression_std_err'], '/'.join(self.opts.metric)))
self._PrintConfidence(results_dict)
def _PrintFailedBanner(self, results_dict):
print
- print ('Bisect could not reproduce a change in the '
- '%s/%s metric.' % (self.opts.metric[0], self.opts.metric[1]))
+ if self._IsBisectModeReturnCode():
+ print 'Bisect could not reproduce a change in the return code.'
+ else:
+ print ('Bisect could not reproduce a change in the '
+ '%s metric.' % '/'.join(self.opts.metric))
print
- self._PrintConfidence(results_dict)
def _GetViewVCLinkFromDepotAndHash(self, cl, depot):
info = self.source_control.QueryRevisionInfo(cl,
@@ -3013,6 +3129,53 @@ class BisectPerformanceMetrics(object):
print 'Commit : %s' % cl
print 'Date : %s' % info['date']
+ def _PrintTableRow(self, column_widths, row_data):
+ assert len(column_widths) == len(row_data)
+
+ text = ''
+ for i in xrange(len(column_widths)):
+ current_row_data = row_data[i].center(column_widths[i], ' ')
+ text += ('%%%ds' % column_widths[i]) % current_row_data
+ print text
+
+ def _PrintTestedCommitsHeader(self):
+ if self.opts.bisect_mode == BISECT_MODE_MEAN:
+ self._PrintTableRow(
+ [20, 70, 14, 12, 13],
+ ['Depot', 'Commit SHA', 'Mean', 'Std. Error', 'State'])
+ elif self.opts.bisect_mode == BISECT_MODE_STD_DEV:
+ self._PrintTableRow(
+ [20, 70, 14, 12, 13],
+ ['Depot', 'Commit SHA', 'Std. Error', 'Mean', 'State'])
+ elif self.opts.bisect_mode == BISECT_MODE_RETURN_CODE:
+ self._PrintTableRow(
+ [20, 70, 14, 13],
+ ['Depot', 'Commit SHA', 'Return Code', 'State'])
+ else:
+ assert False, "Invalid bisect_mode specified."
+ print ' %20s %70s %14s %13s' % ('Depot'.center(20, ' '),
+ 'Commit SHA'.center(70, ' '), 'Return Code'.center(14, ' '),
+ 'State'.center(13, ' '))
+
+ def _PrintTestedCommitsEntry(self, current_data, cl_link, state_str):
+ if self.opts.bisect_mode == BISECT_MODE_MEAN:
+ std_error = '+-%.02f' % current_data['value']['std_err']
+ mean = '%.02f' % current_data['value']['mean']
+ self._PrintTableRow(
+ [20, 70, 12, 14, 13],
+ [current_data['depot'], cl_link, mean, std_error, state_str])
+ elif self.opts.bisect_mode == BISECT_MODE_STD_DEV:
+ std_error = '+-%.02f' % current_data['value']['std_err']
+ mean = '%.02f' % current_data['value']['mean']
+ self._PrintTableRow(
+ [20, 70, 12, 14, 13],
+ [current_data['depot'], cl_link, std_error, mean, state_str])
+ elif self.opts.bisect_mode == BISECT_MODE_RETURN_CODE:
+ mean = '%d' % current_data['value']['mean']
+ self._PrintTableRow(
+ [20, 70, 14, 13],
+ [current_data['depot'], cl_link, mean, state_str])
+
def _PrintTestedCommitsTable(self, revision_data_sorted,
first_working_revision, last_broken_revision, confidence,
final_step=True):
@@ -3021,9 +3184,7 @@ class BisectPerformanceMetrics(object):
print 'Tested commits:'
else:
print 'Partial results:'
- print ' %20s %70s %12s %14s %13s' % ('Depot'.center(20, ' '),
- 'Commit SHA'.center(70, ' '), 'Mean'.center(12, ' '),
- 'Std. Error'.center(14, ' '), 'State'.center(13, ' '))
+ self._PrintTestedCommitsHeader()
state = 0
for current_id, current_data in revision_data_sorted:
if current_data['value']:
@@ -3049,16 +3210,11 @@ class BisectPerformanceMetrics(object):
state_str = ''
state_str = state_str.center(13, ' ')
- std_error = ('+-%.02f' %
- current_data['value']['std_err']).center(14, ' ')
- mean = ('%.02f' % current_data['value']['mean']).center(12, ' ')
cl_link = self._GetViewVCLinkFromDepotAndHash(current_id,
current_data['depot'])
if not cl_link:
cl_link = current_id
- print ' %20s %70s %12s %14s %13s' % (
- current_data['depot'].center(20, ' '), cl_link.center(70, ' '),
- mean, std_error, state_str)
+ self._PrintTestedCommitsEntry(current_data, cl_link, state_str)
def _PrintReproSteps(self):
print
@@ -3433,6 +3589,7 @@ class BisectOptions(object):
self.target_arch = 'ia32'
self.builder_host = None
self.builder_port = None
+ self.bisect_mode = BISECT_MODE_MEAN
def _CreateCommandLineParser(self):
"""Creates a parser with bisect options.
@@ -3487,6 +3644,13 @@ class BisectOptions(object):
'truncated mean. Values will be clamped to range [0, '
'25]. Default value is 25 (highest/lowest 25% will be '
'discarded).')
+ group.add_option('--bisect_mode',
+ type='choice',
+ choices=[BISECT_MODE_MEAN, BISECT_MODE_STD_DEV,
+ BISECT_MODE_RETURN_CODE],
+ default=BISECT_MODE_MEAN,
+ help='The bisect mode. Choices are to bisect on the '
+ 'difference in mean, std_dev, or return_code.')
parser.add_option_group(group)
group = optparse.OptionGroup(parser, 'Build options')
@@ -3586,7 +3750,7 @@ class BisectOptions(object):
if not opts.bad_revision:
raise RuntimeError('missing required parameter: --bad_revision')
- if not opts.metric:
+ if not opts.metric and opts.bisect_mode != BISECT_MODE_RETURN_CODE:
raise RuntimeError('missing required parameter: --metric')
if opts.gs_bucket:
@@ -3614,7 +3778,8 @@ class BisectOptions(object):
raise RuntimeError('missing required parameter: --working_directory')
metric_values = opts.metric.split('/')
- if len(metric_values) != 2:
+ if (len(metric_values) != 2 and
+ opts.bisect_mode != BISECT_MODE_RETURN_CODE):
raise RuntimeError("Invalid metric specified: [%s]" % opts.metric)
opts.metric = metric_values
diff --git a/tools/bisect-perf-regression_test.py b/tools/bisect-perf-regression_test.py
index 5d77643d78..7a50316c45 100644
--- a/tools/bisect-perf-regression_test.py
+++ b/tools/bisect-perf-regression_test.py
@@ -20,6 +20,37 @@ class BisectPerfRegressionTest(unittest.TestCase):
"""Cleans up the test environment after each test method."""
pass
+ def testParseDEPSStringManually(self):
+ """Tests DEPS parsing."""
+ bisect_options = bisect_perf_module.BisectOptions()
+ bisect_instance = bisect_perf_module.BisectPerformanceMetrics(
+ None, bisect_options)
+
+ deps_file_contents = """
+vars = {
+ 'ffmpeg_hash':
+ '@ac4a9f31fe2610bd146857bbd55d7a260003a888',
+ 'webkit_url':
+ 'https://chromium.googlesource.com/chromium/blink.git',
+ 'git_url':
+ 'https://chromium.googlesource.com',
+ 'webkit_rev':
+ '@e01ac0a267d1017288bc67fa3c366b10469d8a24',
+ 'angle_revision':
+ '74697cf2064c0a2c0d7e1b1b28db439286766a05'
+}"""
+
+ # Should only expect svn/git revisions to come through, and urls to be
+ # filtered out.
+ expected_vars_dict = {
+ 'ffmpeg_hash': '@ac4a9f31fe2610bd146857bbd55d7a260003a888',
+ 'webkit_rev': '@e01ac0a267d1017288bc67fa3c366b10469d8a24',
+ 'angle_revision': '74697cf2064c0a2c0d7e1b1b28db439286766a05'
+ }
+ vars_dict = bisect_instance._ParseRevisionsFromDEPSFileManually(
+ deps_file_contents)
+ self.assertEqual(vars_dict, expected_vars_dict)
+
def testCalculateTruncatedMeanRaisesError(self):
"""CalculateTrunctedMean raises an error when passed an empty list."""
with self.assertRaises(TypeError):
diff --git a/tools/bisect_utils.py b/tools/bisect_utils.py
index 68dc19adc8..fa2b4d2691 100644
--- a/tools/bisect_utils.py
+++ b/tools/bisect_utils.py
@@ -40,7 +40,12 @@ DEFAULT_GCLIENT_CUSTOM_DEPS = {
"chrome/deps/adobe/flash/binaries/ppapi/win/.git",
"src/third_party/adobe/flash/binaries/ppapi/win_x64":
"https://chrome-internal.googlesource.com/"
- "chrome/deps/adobe/flash/binaries/ppapi/win_x64/.git",}
+ "chrome/deps/adobe/flash/binaries/ppapi/win_x64/.git",
+ "src/chrome/tools/test/reference_build/chrome_win": None,
+ "src/chrome/tools/test/reference_build/chrome_mac": None,
+ "src/chrome/tools/test/reference_build/chrome_linux": None,
+ "src/third_party/WebKit/LayoutTests": None,
+ "src/tools/valgrind": None,}
GCLIENT_SPEC_DATA = [
{ "name" : "src",
diff --git a/tools/checkdeps/builddeps.py b/tools/checkdeps/builddeps.py
index 01e107b0b8..0057f635c2 100755
--- a/tools/checkdeps/builddeps.py
+++ b/tools/checkdeps/builddeps.py
@@ -75,9 +75,10 @@ backslashes. All directories should be relative to the source root and use
only lowercase.
"""
-import os
-import subprocess
import copy
+import os.path
+import posixpath
+import subprocess
from rules import Rule, Rules
@@ -97,14 +98,45 @@ SKIP_SUBDIRS_VAR_NAME = 'skip_child_includes'
def NormalizePath(path):
- """Returns a path normalized to how we write DEPS rules and compare paths.
- """
- return path.lower().replace('\\', '/')
+ """Returns a path normalized to how we write DEPS rules and compare paths."""
+ return os.path.normcase(path).replace(os.path.sep, posixpath.sep)
+
+
+def _GitSourceDirectories(base_directory):
+ """Returns set of normalized paths to subdirectories containing sources
+ managed by git."""
+ if not os.path.exists(os.path.join(base_directory, '.git')):
+ return set()
+
+ base_dir_norm = NormalizePath(base_directory)
+ git_source_directories = set([base_dir_norm])
+
+ git_ls_files_cmd = ['git', 'ls-files']
+ # FIXME: Use a context manager in Python 3.2+
+ popen = subprocess.Popen(git_ls_files_cmd,
+ stdout=subprocess.PIPE,
+ bufsize=1, # line buffering, since read by line
+ cwd=base_directory)
+ try:
+ try:
+ for line in popen.stdout:
+ dir_path = os.path.join(base_directory, os.path.dirname(line))
+ dir_path_norm = NormalizePath(dir_path)
+ # Add the directory as well as all the parent directories,
+ # stopping once we reach an already-listed directory.
+ while dir_path_norm not in git_source_directories:
+ git_source_directories.add(dir_path_norm)
+ dir_path_norm = posixpath.dirname(dir_path_norm)
+ finally:
+ popen.stdout.close()
+ finally:
+ popen.wait()
+
+ return git_source_directories
class DepsBuilder(object):
- """Parses include_rules from DEPS files.
- """
+ """Parses include_rules from DEPS files."""
def __init__(self,
base_directory=None,
@@ -115,28 +147,31 @@ class DepsBuilder(object):
"""Creates a new DepsBuilder.
Args:
- base_directory: OS-compatible path to root of checkout, e.g. C:\chr\src.
- verbose: Set to true for debug output.
- being_tested: Set to true to ignore the DEPS file at tools/checkdeps/DEPS.
+ base_directory: local path to root of checkout, e.g. C:\chr\src.
+ verbose: Set to True for debug output.
+ being_tested: Set to True to ignore the DEPS file at tools/checkdeps/DEPS.
ignore_temp_rules: Ignore rules that start with Rule.TEMP_ALLOW ("!").
"""
base_directory = (base_directory or
- os.path.join(os.path.dirname(__file__), '..', '..'))
- self.base_directory = os.path.abspath(base_directory)
+ os.path.join(os.path.dirname(__file__),
+ os.path.pardir, os.path.pardir))
+ self.base_directory = os.path.abspath(base_directory) # Local absolute path
self.verbose = verbose
self._under_test = being_tested
self._ignore_temp_rules = ignore_temp_rules
self._ignore_specific_rules = ignore_specific_rules
- self.git_source_directories = set()
- self._AddGitSourceDirectories()
+ # Set of normalized paths
+ self.git_source_directories = _GitSourceDirectories(self.base_directory)
# Map of normalized directory paths to rules to use for those
# directories, or None for directories that should be skipped.
+ # Normalized is: absolute, lowercase, / for separator.
self.directory_rules = {}
self._ApplyDirectoryRulesAndSkipSubdirs(Rules(), self.base_directory)
- def _ApplyRules(self, existing_rules, includes, specific_includes, cur_dir):
+ def _ApplyRules(self, existing_rules, includes, specific_includes,
+ cur_dir_norm):
"""Applies the given include rules, returning the new rules.
Args:
@@ -144,8 +179,8 @@ class DepsBuilder(object):
include: The list of rules from the "include_rules" section of DEPS.
specific_includes: E.g. {'.*_unittest\.cc': ['+foo', '-blat']} rules
from the "specific_include_rules" section of DEPS.
- cur_dir: The current directory, normalized path. We will create an
- implicit rule that allows inclusion from this directory.
+ cur_dir_norm: The current directory, normalized path. We will create an
+ implicit rule that allows inclusion from this directory.
Returns: A new set of rules combining the existing_rules with the other
arguments.
@@ -153,22 +188,21 @@ class DepsBuilder(object):
rules = copy.deepcopy(existing_rules)
# First apply the implicit "allow" rule for the current directory.
- if cur_dir.startswith(
- NormalizePath(os.path.normpath(self.base_directory))):
- relative_dir = cur_dir[len(self.base_directory) + 1:]
-
- source = relative_dir
- if len(source) == 0:
- source = 'top level' # Make the help string a little more meaningful.
- rules.AddRule('+' + relative_dir,
- relative_dir,
- 'Default rule for ' + source)
- else:
- raise Exception('Internal error: base directory is not at the beginning' +
- ' for\n %s and base dir\n %s' %
- (cur_dir, self.base_directory))
-
- def ApplyOneRule(rule_str, cur_dir, dependee_regexp=None):
+ base_dir_norm = NormalizePath(self.base_directory)
+ if not cur_dir_norm.startswith(base_dir_norm):
+ raise Exception(
+ 'Internal error: base directory is not at the beginning for\n'
+ ' %s and base dir\n'
+ ' %s' % (cur_dir_norm, base_dir_norm))
+ relative_dir = posixpath.relpath(cur_dir_norm, base_dir_norm)
+
+ # Make the help string a little more meaningful.
+ source = relative_dir or 'top level'
+ rules.AddRule('+' + relative_dir,
+ relative_dir,
+ 'Default rule for ' + source)
+
+ def ApplyOneRule(rule_str, dependee_regexp=None):
"""Deduces a sensible description for the rule being added, and
adds the rule with its description to |rules|.
@@ -181,58 +215,62 @@ class DepsBuilder(object):
rule_block_name = 'include_rules'
if dependee_regexp:
rule_block_name = 'specific_include_rules'
- if not relative_dir:
- rule_description = 'the top level %s' % rule_block_name
- else:
+ if relative_dir:
rule_description = relative_dir + "'s %s" % rule_block_name
+ else:
+ rule_description = 'the top level %s' % rule_block_name
rules.AddRule(rule_str, relative_dir, rule_description, dependee_regexp)
# Apply the additional explicit rules.
- for (_, rule_str) in enumerate(includes):
- ApplyOneRule(rule_str, cur_dir)
+ for rule_str in includes:
+ ApplyOneRule(rule_str)
# Finally, apply the specific rules.
- if not self._ignore_specific_rules:
- for regexp, specific_rules in specific_includes.iteritems():
- for rule_str in specific_rules:
- ApplyOneRule(rule_str, cur_dir, regexp)
+ if self._ignore_specific_rules:
+ return rules
+
+ for regexp, specific_rules in specific_includes.iteritems():
+ for rule_str in specific_rules:
+ ApplyOneRule(rule_str, regexp)
return rules
- def _ApplyDirectoryRules(self, existing_rules, dir_name):
+ def _ApplyDirectoryRules(self, existing_rules, dir_path_local_abs):
"""Combines rules from the existing rules and the new directory.
- Any directory can contain a DEPS file. Toplevel DEPS files can contain
+ Any directory can contain a DEPS file. Top-level DEPS files can contain
module dependencies which are used by gclient. We use these, along with
additional include rules and implicit rules for the given directory, to
come up with a combined set of rules to apply for the directory.
Args:
existing_rules: The rules for the parent directory. We'll add-on to these.
- dir_name: The directory name that the deps file may live in (if
- it exists). This will also be used to generate the
- implicit rules. This is a non-normalized path.
-
- Returns: A tuple containing: (1) the combined set of rules to apply to the
- sub-tree, and (2) a list of all subdirectories that should NOT be
- checked, as specified in the DEPS file (if any).
+ dir_path_local_abs: The directory path that the DEPS file may live in (if
+ it exists). This will also be used to generate the
+ implicit rules. This is a local path.
+
+ Returns: A 2-tuple of:
+ (1) the combined set of rules to apply to the sub-tree,
+ (2) a list of all subdirectories that should NOT be checked, as specified
+ in the DEPS file (if any).
+ Subdirectories are single words, hence no OS dependence.
"""
- norm_dir_name = NormalizePath(dir_name)
+ dir_path_norm = NormalizePath(dir_path_local_abs)
- # Check for a .svn directory in this directory or check this directory is
- # contained in git source direcotries. This will tell us if it's a source
+ # Check for a .svn directory in this directory or that this directory is
+ # contained in git source directories. This will tell us if it's a source
# directory and should be checked.
- if not (os.path.exists(os.path.join(dir_name, ".svn")) or
- (norm_dir_name in self.git_source_directories)):
- return (None, [])
+ if not (os.path.exists(os.path.join(dir_path_local_abs, '.svn')) or
+ dir_path_norm in self.git_source_directories):
+ return None, []
# Check the DEPS file in this directory.
if self.verbose:
- print 'Applying rules from', dir_name
- def FromImpl(_unused, _unused2):
+ print 'Applying rules from', dir_path_local_abs
+ def FromImpl(*_):
pass # NOP function so "From" doesn't fail.
- def FileImpl(_unused):
+ def FileImpl(_):
pass # NOP function so "File" doesn't fail.
class _VarImpl:
@@ -241,17 +279,18 @@ class DepsBuilder(object):
def Lookup(self, var_name):
"""Implements the Var syntax."""
- if var_name in self._local_scope.get('vars', {}):
+ try:
return self._local_scope['vars'][var_name]
- raise Exception('Var is not defined: %s' % var_name)
+ except KeyError:
+ raise Exception('Var is not defined: %s' % var_name)
local_scope = {}
global_scope = {
- 'File': FileImpl,
- 'From': FromImpl,
- 'Var': _VarImpl(local_scope).Lookup,
- }
- deps_file = os.path.join(dir_name, 'DEPS')
+ 'File': FileImpl,
+ 'From': FromImpl,
+ 'Var': _VarImpl(local_scope).Lookup,
+ }
+ deps_file_path = os.path.join(dir_path_local_abs, 'DEPS')
# The second conditional here is to disregard the
# tools/checkdeps/DEPS file while running tests. This DEPS file
@@ -261,11 +300,12 @@ class DepsBuilder(object):
# running tests, we absolutely need to verify the contents of that
# directory to trigger those intended violations and see that they
# are handled correctly.
- if os.path.isfile(deps_file) and (
- not self._under_test or not os.path.split(dir_name)[1] == 'checkdeps'):
- execfile(deps_file, global_scope, local_scope)
+ if os.path.isfile(deps_file_path) and not (
+ self._under_test and
+ os.path.basename(dir_path_local_abs) == 'checkdeps'):
+ execfile(deps_file_path, global_scope, local_scope)
elif self.verbose:
- print ' No deps file found in', dir_name
+ print ' No deps file found in', dir_path_local_abs
# Even if a DEPS file does not exist we still invoke ApplyRules
# to apply the implicit "allow" rule for the current directory
@@ -275,72 +315,61 @@ class DepsBuilder(object):
skip_subdirs = local_scope.get(SKIP_SUBDIRS_VAR_NAME, [])
return (self._ApplyRules(existing_rules, include_rules,
- specific_include_rules, norm_dir_name),
+ specific_include_rules, dir_path_norm),
skip_subdirs)
- def _ApplyDirectoryRulesAndSkipSubdirs(self, parent_rules, dir_path):
- """Given |parent_rules| and a subdirectory |dir_path| from the
- directory that owns the |parent_rules|, add |dir_path|'s rules to
+ def _ApplyDirectoryRulesAndSkipSubdirs(self, parent_rules,
+ dir_path_local_abs):
+ """Given |parent_rules| and a subdirectory |dir_path_local_abs| of the
+ directory that owns the |parent_rules|, add |dir_path_local_abs|'s rules to
|self.directory_rules|, and add None entries for any of its
subdirectories that should be skipped.
"""
- directory_rules, excluded_subdirs = self._ApplyDirectoryRules(parent_rules,
- dir_path)
- self.directory_rules[NormalizePath(dir_path)] = directory_rules
+ directory_rules, excluded_subdirs = self._ApplyDirectoryRules(
+ parent_rules, dir_path_local_abs)
+ dir_path_norm = NormalizePath(dir_path_local_abs)
+ self.directory_rules[dir_path_norm] = directory_rules
for subdir in excluded_subdirs:
- self.directory_rules[NormalizePath(
- os.path.normpath(os.path.join(dir_path, subdir)))] = None
+ subdir_path_norm = posixpath.join(dir_path_norm, subdir)
+ self.directory_rules[subdir_path_norm] = None
- def GetDirectoryRules(self, dir_path):
+ def GetDirectoryRules(self, dir_path_local):
"""Returns a Rules object to use for the given directory, or None
- if the given directory should be skipped. This takes care of
- first building rules for parent directories (up to
- self.base_directory) if needed.
+ if the given directory should be skipped.
- Args:
- dir_path: A real (non-normalized) path to the directory you want
- rules for.
- """
- norm_dir_path = NormalizePath(dir_path)
-
- if not norm_dir_path.startswith(
- NormalizePath(os.path.normpath(self.base_directory))):
- dir_path = os.path.join(self.base_directory, dir_path)
- norm_dir_path = NormalizePath(dir_path)
-
- parent_dir = os.path.dirname(dir_path)
- parent_rules = None
- if not norm_dir_path in self.directory_rules:
- parent_rules = self.GetDirectoryRules(parent_dir)
-
- # We need to check for an entry for our dir_path again, in case we
- # are at a path e.g. A/B/C where A/B/DEPS specifies the C
- # subdirectory to be skipped; in this case, the invocation to
- # GetDirectoryRules(parent_dir) has already filled in an entry for
- # A/B/C.
- if not norm_dir_path in self.directory_rules:
- if not parent_rules:
- # If the parent directory should be skipped, then the current
- # directory should also be skipped.
- self.directory_rules[norm_dir_path] = None
- else:
- self._ApplyDirectoryRulesAndSkipSubdirs(parent_rules, dir_path)
- return self.directory_rules[norm_dir_path]
+ Also modifies |self.directory_rules| to store the Rules.
+ This takes care of first building rules for parent directories (up to
+ |self.base_directory|) if needed, which may add rules for skipped
+ subdirectories.
- def _AddGitSourceDirectories(self):
- """Adds any directories containing sources managed by git to
- self.git_source_directories.
+ Args:
+ dir_path_local: A local path to the directory you want rules for.
+ Can be relative and unnormalized.
"""
- if not os.path.exists(os.path.join(self.base_directory, '.git')):
- return
-
- popen_out = os.popen('cd %s && git ls-files --full-name .' %
- subprocess.list2cmdline([self.base_directory]))
- for line in popen_out.readlines():
- dir_name = os.path.join(self.base_directory, os.path.dirname(line))
- # Add the directory as well as all the parent directories. Use
- # forward slashes and lower case to normalize paths.
- while dir_name != self.base_directory:
- self.git_source_directories.add(NormalizePath(dir_name))
- dir_name = os.path.dirname(dir_name)
- self.git_source_directories.add(NormalizePath(self.base_directory))
+ if os.path.isabs(dir_path_local):
+ dir_path_local_abs = dir_path_local
+ else:
+ dir_path_local_abs = os.path.join(self.base_directory, dir_path_local)
+ dir_path_norm = NormalizePath(dir_path_local_abs)
+
+ if dir_path_norm in self.directory_rules:
+ return self.directory_rules[dir_path_norm]
+
+ parent_dir_local_abs = os.path.dirname(dir_path_local_abs)
+ parent_rules = self.GetDirectoryRules(parent_dir_local_abs)
+ # We need to check for an entry for our dir_path again, since
+ # GetDirectoryRules can modify entries for subdirectories, namely setting
+ # to None if they should be skipped, via _ApplyDirectoryRulesAndSkipSubdirs.
+ # For example, if dir_path == 'A/B/C' and A/B/DEPS specifies that the C
+ # subdirectory be skipped, GetDirectoryRules('A/B') will fill in the entry
+ # for 'A/B/C' as None.
+ if dir_path_norm in self.directory_rules:
+ return self.directory_rules[dir_path_norm]
+
+ if parent_rules:
+ self._ApplyDirectoryRulesAndSkipSubdirs(parent_rules, dir_path_local_abs)
+ else:
+ # If the parent directory should be skipped, then the current
+ # directory should also be skipped.
+ self.directory_rules[dir_path_norm] = None
+ return self.directory_rules[dir_path_norm]
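
The caching scheme above is easier to see in isolation. Below is a minimal, hypothetical Python sketch of the same idea (RuleCache and skip_map are invented names, not part of checkdeps): rules are computed once per directory, parents first, and a stored None marks a directory that a parent DEPS file skips, which is why the entry has to be re-checked after the recursive call.

    import posixpath

    class RuleCache(object):
      def __init__(self, base_directory, skip_map):
        # skip_map: directory -> set of subdirectories its DEPS file skips.
        self.base_directory = base_directory
        self.skip_map = skip_map
        # Seeding the base directory keeps the recursion below finite.
        self.directory_rules = {base_directory: 'base rules'}

      def GetDirectoryRules(self, dir_path):
        if dir_path in self.directory_rules:
          return self.directory_rules[dir_path]
        parent_rules = self.GetDirectoryRules(posixpath.dirname(dir_path))
        # The recursive call may already have filled in our entry (as a skipped
        # subdirectory), so check again before computing anything.
        if dir_path in self.directory_rules:
          return self.directory_rules[dir_path]
        if parent_rules is None:
          # A skipped parent means a skipped child.
          self.directory_rules[dir_path] = None
        else:
          self.directory_rules[dir_path] = parent_rules + ' + ' + dir_path
          for sub in self.skip_map.get(dir_path, ()):
            self.directory_rules[posixpath.join(dir_path, sub)] = None
        return self.directory_rules[dir_path]

    cache = RuleCache('src', {'src/a': set(['skipme'])})
    print(cache.GetDirectoryRules('src/a/b'))       # base rules + src/a + src/a/b
    print(cache.GetDirectoryRules('src/a/skipme'))  # None (skipped by src/a's DEPS)
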
diff --git a/tools/checkdeps/checkdeps.py b/tools/checkdeps/checkdeps.py
index 6e815c0e2c..5bf7907606 100755
--- a/tools/checkdeps/checkdeps.py
+++ b/tools/checkdeps/checkdeps.py
@@ -33,7 +33,7 @@ def _IsTestFile(filename):
class DepsChecker(DepsBuilder):
- """Parses include_rules from DEPS files and erifies files in the
+ """Parses include_rules from DEPS files and verifies files in the
source tree against them.
"""
@@ -83,7 +83,7 @@ class DepsChecker(DepsBuilder):
def _CheckDirectoryImpl(self, checkers, dir_name):
rules = self.GetDirectoryRules(dir_name)
- if rules == None:
+ if rules is None:
return
# Collect a list of all files and directories to check.
@@ -125,18 +125,21 @@ class DepsChecker(DepsBuilder):
problems = []
for file_path, include_lines in added_includes:
if not cpp.IsCppFile(file_path):
- pass
+ continue
rules_for_file = self.GetDirectoryRules(os.path.dirname(file_path))
- if rules_for_file:
- for line in include_lines:
- is_include, violation = cpp.CheckLine(
- rules_for_file, line, file_path, True)
- if violation:
- rule_type = violation.violated_rule.allow
- if rule_type != Rule.ALLOW:
- violation_text = results.NormalResultsFormatter.FormatViolation(
- violation, self.verbose)
- problems.append((file_path, rule_type, violation_text))
+ if not rules_for_file:
+ continue
+ for line in include_lines:
+ is_include, violation = cpp.CheckLine(
+ rules_for_file, line, file_path, True)
+ if not violation:
+ continue
+ rule_type = violation.violated_rule.allow
+ if rule_type == Rule.ALLOW:
+ continue
+ violation_text = results.NormalResultsFormatter.FormatViolation(
+ violation, self.verbose)
+ problems.append((file_path, rule_type, violation_text))
return problems
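
The rewrite above is a guard-clause refactor and also fixes a real bug: the old `pass` for non-C++ files fell through and kept checking them. A toy sketch of the resulting shape (is_cpp_file, rules_for and check_line are stand-ins here, not the checkdeps API):

    def find_problems(added_includes, is_cpp_file, rules_for, check_line):
      problems = []
      for file_path, include_lines in added_includes:
        if not is_cpp_file(file_path):
          continue                     # was "pass", which silently fell through
        rules = rules_for(file_path)
        if not rules:
          continue
        for line in include_lines:
          violation = check_line(rules, line, file_path)
          if not violation:
            continue
          problems.append((file_path, violation))
      return problems

    # a.py is skipped outright, so no rules are even looked up for it.
    print(find_problems([('a.py', ['#include "x.h"'])],
                        lambda p: p.endswith('.cc'),
                        lambda p: None,
                        lambda rules, line, path: None))   # []
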
diff --git a/tools/checkdeps/cpp_checker.py b/tools/checkdeps/cpp_checker.py
index 9bcd14d75c..94fd37a921 100644
--- a/tools/checkdeps/cpp_checker.py
+++ b/tools/checkdeps/cpp_checker.py
@@ -62,7 +62,7 @@ class CppChecker(object):
# Don't fail when no directory is specified. We may want to be more
# strict about this in the future.
if self._verbose:
- print ' WARNING: directory specified with no path: ' + include_path
+ print ' WARNING: include specified with no directory: ' + include_path
return True, None
rule = rules.RuleApplyingTo(include_path, dependee_path)
@@ -86,7 +86,7 @@ class CppChecker(object):
line = line.strip()
- # Check to see if we're at / inside a #if 0 block
+ # Check to see if we're at / inside an #if 0 block
if line.startswith('#if 0'):
in_if0 += 1
continue
diff --git a/tools/checkdeps/rules.py b/tools/checkdeps/rules.py
index b8a07df005..199c18f367 100644
--- a/tools/checkdeps/rules.py
+++ b/tools/checkdeps/rules.py
@@ -35,7 +35,7 @@ class Rule(object):
which is fully self-sufficient to answer the question whether the dependent
is allowed to depend on the dependee, without knowing the external
context."""
- return (self.allow, self._dependent_dir or '.', self._dir or '.')
+ return self.allow, self._dependent_dir or '.', self._dir or '.'
def ParentOrMatch(self, other):
"""Returns true if the input string is an exact match or is a parent
@@ -74,7 +74,7 @@ def ParseRuleString(rule_string, source):
'The rule string "%s" does not begin with a "+", "-" or "!".' %
rule_string)
- return (rule_string[0], rule_string[1:])
+ return rule_string[0], rule_string[1:]
class Rules(object):
@@ -141,7 +141,7 @@ class Rules(object):
matches the expression. None to match all
dependee files.
"""
- (rule_type, rule_dir) = ParseRuleString(rule_string, source)
+ rule_type, rule_dir = ParseRuleString(rule_string, source)
if not dependee_regexp:
rules_to_update = self._general_rules
diff --git a/tools/clang/blink_gc_plugin/RecordInfo.cpp b/tools/clang/blink_gc_plugin/RecordInfo.cpp
index fb3c4a2464..bbe4e58e11 100644
--- a/tools/clang/blink_gc_plugin/RecordInfo.cpp
+++ b/tools/clang/blink_gc_plugin/RecordInfo.cpp
@@ -134,19 +134,22 @@ bool RecordInfo::IsTreeShared() {
}
// A GC mixin is a class that inherits from a GC mixin base and has
-// has not yet been "mixed in" with another GC base class.
+// not yet been "mixed in" with another GC base class.
bool RecordInfo::IsGCMixin() {
if (!IsGCDerived() || base_paths_->begin() == base_paths_->end())
return false;
- // Get the last element of the first path.
- CXXBasePaths::paths_iterator it = base_paths_->begin();
- const CXXBasePathElement& elem = (*it)[it->size() - 1];
- CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
- // If it is not a mixin base we are done.
- if (!Config::IsGCMixinBase(base->getName()))
- return false;
- // This is a mixin if there are no other paths to GC bases.
- return ++it == base_paths_->end();
+ for (CXXBasePaths::paths_iterator it = base_paths_->begin();
+ it != base_paths_->end();
+ ++it) {
+ // Get the last element of the path.
+ const CXXBasePathElement& elem = (*it)[it->size() - 1];
+ CXXRecordDecl* base = elem.Base->getType()->getAsCXXRecordDecl();
+ // If it is not a mixin base we are done.
+ if (!Config::IsGCMixinBase(base->getName()))
+ return false;
+ }
+ // This is a mixin if all GC bases are mixins.
+ return true;
}
// Test if a record is allocated on the managed heap.
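
In other words, IsGCMixin used to inspect only the last element of the first base path and then require it to be the sole path; after this change every base path must terminate in a GC mixin base. A rough Python model of the new predicate (class names are examples only, not the plugin's API):

    def is_gc_mixin(base_paths, is_mixin_base):
      if not base_paths:
        return False
      # A mixin only if every path to a GC base ends at a mixin base.
      return all(is_mixin_base(path[-1]) for path in base_paths)

    mixin_bases = set(['GarbageCollectedMixin'])
    print(is_gc_mixin([['GarbageCollectedMixin']],
                      mixin_bases.__contains__))   # True
    print(is_gc_mixin([['GarbageCollectedMixin'], ['GarbageCollected']],
                      mixin_bases.__contains__))   # False: one path ends in a full GC base
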
diff --git a/tools/clang/scripts/update.py b/tools/clang/scripts/update.py
index e57edf3ffd..45a3408a19 100755
--- a/tools/clang/scripts/update.py
+++ b/tools/clang/scripts/update.py
@@ -66,18 +66,10 @@ def DeleteFiles(dir, pattern):
def ClobberChromiumBuildFiles():
"""Clobber Chomium build files."""
print 'Clobbering Chromium build files...'
- n = 0
- dirs = [
- os.path.join(CHROMIUM_DIR, 'out/Debug'),
- os.path.join(CHROMIUM_DIR, 'out/Release'),
- ]
- for d in dirs:
- if not os.path.exists(d):
- continue
- n += DeleteFiles(d, r'.*\.o')
- n += DeleteFiles(d, r'.*\.obj')
- n += DeleteFiles(d, r'stamp.untar')
- print 'Removed %d files.' % (n)
+ out_dir = os.path.join(CHROMIUM_DIR, 'out')
+ if os.path.isdir(out_dir):
+ shutil.rmtree(out_dir)
+ print 'Removed Chromium out dir: %s.' % (out_dir)
def RunCommand(command, tries=1):
diff --git a/tools/clang/scripts/update.sh b/tools/clang/scripts/update.sh
index bdf701dbf1..fe16d1261d 100755
--- a/tools/clang/scripts/update.sh
+++ b/tools/clang/scripts/update.sh
@@ -171,31 +171,18 @@ fi
rm -f "${STAMP_FILE}"
-# Clobber build files. PCH files only work with the compiler that created them.
-# We delete .o files to make sure all files are built with the new compiler.
+# Clobber all output files. PCH files only work with the compiler that created
+# them, so we need to clobber the output files to make sure they are rebuilt
+# using the new compiler.
echo "Clobbering build files"
MAKE_DIR="${THIS_DIR}/../../../out"
XCODEBUILD_DIR="${THIS_DIR}/../../../xcodebuild"
-for DIR in "${XCODEBUILD_DIR}" "${MAKE_DIR}/Debug" "${MAKE_DIR}/Release"; do
+for DIR in "${XCODEBUILD_DIR}" "${MAKE_DIR}"; do
if [[ -d "${DIR}" ]]; then
- find "${DIR}" -name '*.o' -exec rm {} +
- find "${DIR}" -name '*.o.d' -exec rm {} +
- find "${DIR}" -name '*.gch' -exec rm {} +
- find "${DIR}" -name '*.dylib' -exec rm -rf {} +
- find "${DIR}" -name 'SharedPrecompiledHeaders' -exec rm -rf {} +
+ rm -rf "${DIR}"
fi
done
-# Clobber NaCl toolchain stamp files, see http://crbug.com/159793
-if [[ -d "${MAKE_DIR}" ]]; then
- find "${MAKE_DIR}" -name 'stamp.untar' -exec rm {} +
-fi
-if [[ "${OS}" = "Darwin" ]]; then
- if [[ -d "${XCODEBUILD_DIR}" ]]; then
- find "${XCODEBUILD_DIR}" -name 'stamp.untar' -exec rm {} +
- fi
-fi
-
if [[ -z "$force_local_build" ]]; then
# Check if there's a prebuilt binary and if so just fetch that. That's faster,
# and goma relies on having matching binary hashes on client and server too.
diff --git a/tools/cr/cr/actions/ninja.py b/tools/cr/cr/actions/ninja.py
index d2e77b0aac..db3b3b2966 100644
--- a/tools/cr/cr/actions/ninja.py
+++ b/tools/cr/cr/actions/ninja.py
@@ -4,6 +4,7 @@
"""A module to add ninja support to cr."""
+import multiprocessing
import os
import cr
@@ -20,10 +21,11 @@ class NinjaBuilder(cr.Builder):
"""An implementation of Builder that uses ninja to do the actual build."""
# Some basic configuration installed if we are enabled.
+ EXTRA_FOR_IO_BOUND_JOBS = 2
ENABLED = cr.Config.From(
NINJA_BINARY=os.path.join('{DEPOT_TOOLS}', 'ninja'),
- NINJA_JOBS=10,
- NINJA_PROCESSORS=4,
+ NINJA_JOBS=multiprocessing.cpu_count() + EXTRA_FOR_IO_BOUND_JOBS,
+ NINJA_PROCESSORS=multiprocessing.cpu_count(),
NINJA_BUILD_FILE=os.path.join('{CR_BUILD_DIR}', 'build.ninja'),
# Don't rename to GOMA_* or Goma will complain: "unkown GOMA_ parameter".
NINJA_GOMA_LINE='cc = {CR_GOMA_CC} $',
@@ -35,8 +37,7 @@ class NinjaBuilder(cr.Builder):
GOMA_DIR='{CR_GOMA_DIR}',
GYP_DEF_gomadir='{CR_GOMA_DIR}',
GYP_DEF_use_goma=1,
- NINJA_JOBS=200,
- NINJA_PROCESSORS=12,
+ NINJA_JOBS=multiprocessing.cpu_count() * 10,
)
# A placeholder for the system detected configuration
DETECTED = cr.Config('DETECTED')
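
In effect the hard-coded defaults (10 jobs / 4 processors locally, 200 jobs with goma) become values derived from the machine running the build. The arithmetic, restated with the same constants as above:

    import multiprocessing

    EXTRA_FOR_IO_BOUND_JOBS = 2
    cpus = multiprocessing.cpu_count()
    local_jobs = cpus + EXTRA_FOR_IO_BOUND_JOBS   # replaces the hard-coded 10
    goma_jobs = cpus * 10                         # replaces the hard-coded 200
    print('%d local jobs, %d with goma' % (local_jobs, goma_jobs))
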
diff --git a/tools/cr/cr/base/android.py b/tools/cr/cr/base/android.py
index d876b29220..6b25f584b1 100644
--- a/tools/cr/cr/base/android.py
+++ b/tools/cr/cr/base/android.py
@@ -37,7 +37,6 @@ class AndroidPlatform(cr.Platform):
CR_TEST_RUNNER=os.path.join(
'{CR_SRC}', 'build', 'android', 'test_runner.py'),
CR_ADB_GDB=os.path.join('{CR_SRC}', 'build', 'android', 'adb_gdb'),
- CHROMIUM_OUT_DIR='{CR_OUT_BASE}',
CR_DEFAULT_TARGET='chrome_shell',
GYP_DEF_OS='android'
)
diff --git a/tools/cr/cr/base/platform.py b/tools/cr/cr/base/platform.py
index 0220d9f164..31269521f1 100644
--- a/tools/cr/cr/base/platform.py
+++ b/tools/cr/cr/base/platform.py
@@ -11,7 +11,7 @@ import cr
DEFAULT = cr.Config.From(
DEPOT_TOOLS=os.path.join('{GOOGLE_CODE}', 'depot_tools'),
-)
+ CHROMIUM_OUT_DIR='{CR_OUT_BASE}',)
class Platform(cr.Plugin, cr.Plugin.Type):
diff --git a/tools/gn/escape.cc b/tools/gn/escape.cc
index e10e24a2b6..c1acc722a9 100644
--- a/tools/gn/escape.cc
+++ b/tools/gn/escape.cc
@@ -58,6 +58,9 @@ void EscapeStringToString(const base::StringPiece& str,
} else if (str[i] == '\\' && (options.mode & ESCAPE_JSON)) {
dest->push_back('\\');
dest->push_back('\\');
+ } else if (str[i] == ':' && (options.mode & ESCAPE_NINJA)) {
+ dest->push_back('$');
+ dest->push_back(':');
} else {
dest->push_back(str[i]);
}
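
This adds ':' to the characters gn escapes for Ninja; an unescaped colon in a build line is Ninja's separator between outputs and the rule, so paths like C:/python must become C$:/python. A minimal sketch of the escaping subset exercised by the tests that follow (not the real EscapeString, which also handles shell and JSON modes):

    def ninja_escape(s):
      out = []
      for ch in s:
        if ch in ('$', ' ', ':'):
          out.append('$' + ch)   # '$' is Ninja's escape character
        else:
          out.append(ch)
      return ''.join(out)

    print(ninja_escape('asdf: "$\\bar'))   # asdf$:$ "$$\bar
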
diff --git a/tools/gn/escape_unittest.cc b/tools/gn/escape_unittest.cc
index a637e87a3a..44440de6e9 100644
--- a/tools/gn/escape_unittest.cc
+++ b/tools/gn/escape_unittest.cc
@@ -5,6 +5,25 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "tools/gn/escape.h"
+TEST(Escape, Ninja) {
+ EscapeOptions opts;
+ opts.mode = ESCAPE_NINJA;
+ std::string result = EscapeString("asdf: \"$\\bar", opts, NULL);
+ EXPECT_EQ("asdf$:$ \"$$\\bar", result);
+}
+
+TEST(Escape, Shell) {
+ EscapeOptions opts;
+ opts.mode = ESCAPE_SHELL;
+ std::string result = EscapeString("asdf: \"$\\bar", opts, NULL);
+#if defined(OS_WIN)
+ // Windows shell doesn't escape backslashes.
+ EXPECT_EQ("\"asdf: \"$\\bar\"", result);
+#else
+ EXPECT_EQ("\"asdf: \\\"$\\\\bar\"", result);
+#endif
+}
+
TEST(Escape, UsedQuotes) {
EscapeOptions shell_options;
shell_options.mode = ESCAPE_SHELL;
diff --git a/tools/gn/filesystem_utils.cc b/tools/gn/filesystem_utils.cc
index a5787a2ebe..0bc266dc58 100644
--- a/tools/gn/filesystem_utils.cc
+++ b/tools/gn/filesystem_utils.cc
@@ -313,6 +313,23 @@ base::StringPiece FindDir(const std::string* path) {
return base::StringPiece(path->data(), filename_offset);
}
+base::StringPiece FindLastDirComponent(const SourceDir& dir) {
+ const std::string& dir_string = dir.value();
+
+ if (dir_string.empty())
+ return base::StringPiece();
+ int cur = static_cast<int>(dir_string.size()) - 1;
+ DCHECK(dir_string[cur] == '/');
+ int end = cur;
+ cur--; // Skip before the last slash.
+
+ for (; cur >= 0; cur--) {
+ if (dir_string[cur] == '/')
+ return base::StringPiece(&dir_string[cur + 1], end - cur - 1);
+ }
+ return base::StringPiece(&dir_string[0], end);
+}
+
bool EnsureStringIsInOutputDir(const SourceDir& dir,
const std::string& str,
const Value& originating,
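
FindLastDirComponent returns the last path component of a SourceDir, whose value by convention ends in '/'. An equivalent Python one-liner, matching the unit tests added further down:

    def find_last_dir_component(dir_string):
      if not dir_string:
        return ''
      assert dir_string.endswith('/'), 'SourceDir values always end in a slash'
      return dir_string[:-1].rsplit('/', 1)[-1]

    for d in ('', '/', '//', '//foo/', '//foo/bar/'):
      print('%r -> %r' % (d, find_last_dir_component(d)))
    # '' -> '', '/' -> '', '//' -> '', '//foo/' -> 'foo', '//foo/bar/' -> 'bar'
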
diff --git a/tools/gn/filesystem_utils.h b/tools/gn/filesystem_utils.h
index a6bb0fe472..cb5fbfd3f9 100644
--- a/tools/gn/filesystem_utils.h
+++ b/tools/gn/filesystem_utils.h
@@ -92,6 +92,10 @@ bool EndsWithSlash(const std::string& s);
// input pointer must outlive the output.
base::StringPiece FindDir(const std::string* path);
+// Returns the substring identifying the last component of the dir, or the
+// empty substring if none. For example "//foo/bar/" -> "bar".
+base::StringPiece FindLastDirComponent(const SourceDir& dir);
+
// Verifies that the given string references a file inside of the given
// directory. This is pretty stupid and doesn't handle "." and "..", etc.,
// it is designed for a sanity check to keep people from writing output files
diff --git a/tools/gn/filesystem_utils_unittest.cc b/tools/gn/filesystem_utils_unittest.cc
index 3416e1659f..ec1a53e763 100644
--- a/tools/gn/filesystem_utils_unittest.cc
+++ b/tools/gn/filesystem_utils_unittest.cc
@@ -73,6 +73,23 @@ TEST(FilesystemUtils, FindDir) {
EXPECT_EQ("foo/bar/", FindDir(&input));
}
+TEST(FilesystemUtils, FindLastDirComponent) {
+ SourceDir empty;
+ EXPECT_EQ("", FindLastDirComponent(empty));
+
+ SourceDir root("/");
+ EXPECT_EQ("", FindLastDirComponent(root));
+
+ SourceDir srcroot("//");
+ EXPECT_EQ("", FindLastDirComponent(srcroot));
+
+ SourceDir regular1("//foo/");
+ EXPECT_EQ("foo", FindLastDirComponent(regular1));
+
+ SourceDir regular2("//foo/bar/");
+ EXPECT_EQ("bar", FindLastDirComponent(regular2));
+}
+
TEST(FilesystemUtils, IsPathAbsolute) {
EXPECT_TRUE(IsPathAbsolute("/foo/bar"));
EXPECT_TRUE(IsPathAbsolute("/"));
diff --git a/tools/gn/input_file_manager.cc b/tools/gn/input_file_manager.cc
index 04578b6064..097474ae06 100644
--- a/tools/gn/input_file_manager.cc
+++ b/tools/gn/input_file_manager.cc
@@ -20,6 +20,61 @@ void InvokeFileLoadCallback(const InputFileManager::FileLoadCallback& cb,
cb.Run(node);
}
+bool DoLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& name,
+ InputFile* file,
+ std::vector<Token>* tokens,
+ scoped_ptr<ParseNode>* root,
+ Err* err) {
+ // Do all of this stuff outside the lock. We should not give out file
+ // pointers until the read is complete.
+ if (g_scheduler->verbose_logging()) {
+ std::string logmsg = name.value();
+ if (origin.begin().file())
+ logmsg += " (referenced from " + origin.begin().Describe(false) + ")";
+ g_scheduler->Log("Loading", logmsg);
+ }
+
+ // Read.
+ base::FilePath primary_path = build_settings->GetFullPath(name);
+ ScopedTrace load_trace(TraceItem::TRACE_FILE_LOAD, name.value());
+ if (!file->Load(primary_path)) {
+ if (!build_settings->secondary_source_path().empty()) {
+ // Fall back to secondary source tree.
+ base::FilePath secondary_path =
+ build_settings->GetFullPathSecondary(name);
+ if (!file->Load(secondary_path)) {
+ *err = Err(origin, "Can't load input file.",
+ "Unable to load either \n" +
+ FilePathToUTF8(primary_path) + " or \n" +
+ FilePathToUTF8(secondary_path));
+ return false;
+ }
+ } else {
+ *err = Err(origin,
+ "Unable to load \"" + FilePathToUTF8(primary_path) + "\".");
+ return false;
+ }
+ }
+ load_trace.Done();
+
+ ScopedTrace exec_trace(TraceItem::TRACE_FILE_PARSE, name.value());
+
+ // Tokenize.
+ *tokens = Tokenizer::Tokenize(file, err);
+ if (err->has_error())
+ return false;
+
+ // Parse.
+ *root = Parser::Parse(*tokens, err);
+ if (err->has_error())
+ return false;
+
+ exec_trace.Done();
+ return true;
+}
+
} // namespace
InputFileManager::InputFileData::InputFileData(const SourceFile& file_name)
@@ -211,53 +266,17 @@ bool InputFileManager::LoadFile(const LocationRange& origin,
const SourceFile& name,
InputFile* file,
Err* err) {
- // Do all of this stuff outside the lock. We should not give out file
- // pointers until the read is complete.
- if (g_scheduler->verbose_logging()) {
- std::string logmsg = name.value();
- if (origin.begin().file())
- logmsg += " (referenced from " + origin.begin().Describe(false) + ")";
- g_scheduler->Log("Loading", logmsg);
- }
-
- // Read.
- base::FilePath primary_path = build_settings->GetFullPath(name);
- ScopedTrace load_trace(TraceItem::TRACE_FILE_LOAD, name.value());
- if (!file->Load(primary_path)) {
- if (!build_settings->secondary_source_path().empty()) {
- // Fall back to secondary source tree.
- base::FilePath secondary_path =
- build_settings->GetFullPathSecondary(name);
- if (!file->Load(secondary_path)) {
- *err = Err(origin, "Can't load input file.",
- "Unable to load either \n" +
- FilePathToUTF8(primary_path) + " or \n" +
- FilePathToUTF8(secondary_path));
- return false;
- }
- } else {
- *err = Err(origin,
- "Unable to load \"" + FilePathToUTF8(primary_path) + "\".");
- return false;
- }
- }
- load_trace.Done();
-
- ScopedTrace exec_trace(TraceItem::TRACE_FILE_PARSE, name.value());
-
- // Tokenize.
- std::vector<Token> tokens = Tokenizer::Tokenize(file, err);
- if (err->has_error())
- return false;
-
- // Parse.
- scoped_ptr<ParseNode> root = Parser::Parse(tokens, err);
- if (err->has_error())
- return false;
+ std::vector<Token> tokens;
+ scoped_ptr<ParseNode> root;
+ bool success = DoLoadFile(origin, build_settings, name, file,
+ &tokens, &root, err);
+ // Can't return early. We have to ensure that the completion event is
+ // signaled in all cases because another thread could be blocked on this one.
+
+ // Save this pointer for running the callbacks below, which happens after the
+ // scoped ptr ownership is taken away inside the lock.
ParseNode* unowned_root = root.get();
- exec_trace.Done();
-
std::vector<FileLoadCallback> callbacks;
{
base::AutoLock lock(lock_);
@@ -265,8 +284,10 @@ bool InputFileManager::LoadFile(const LocationRange& origin,
InputFileData* data = input_files_[name];
data->loaded = true;
- data->tokens.swap(tokens);
- data->parsed_root = root.Pass();
+ if (success) {
+ data->tokens.swap(tokens);
+ data->parsed_root = root.Pass();
+ }
// Unblock waiters on this event.
//
@@ -288,7 +309,9 @@ bool InputFileManager::LoadFile(const LocationRange& origin,
// Run pending invocations. Theoretically we could schedule each of these
// separately to get some parallelism. But normally there will only be one
// item in the list, so that's extra overhead and complexity for no gain.
- for (size_t i = 0; i < callbacks.size(); i++)
- callbacks[i].Run(unowned_root);
- return true;
+ if (success) {
+ for (size_t i = 0; i < callbacks.size(); i++)
+ callbacks[i].Run(unowned_root);
+ }
+ return success;
}
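
The reason for pulling the loading into DoLoadFile is that LoadFile can no longer return early: other threads may be blocked on this file's completion event, so the event must be signaled, and only the callbacks skipped, when loading fails. A rough threading sketch of that pattern (threading.Event stands in for the real completion event; none of this is the gn API):

    import threading

    def load_file(data, do_load):
      success = do_load()               # everything fallible happens up front
      with data['lock']:
        data['loaded'] = True
        if success:
          data['parsed_root'] = 'root'
        callbacks = data.pop('callbacks', [])
        data['event'].set()             # unblock waiters whether or not we succeeded
      if success:
        for cb in callbacks:
          cb(data['parsed_root'])
      return success

    data = {'lock': threading.Lock(), 'event': threading.Event()}
    print(load_file(data, lambda: False))   # False
    print(data['event'].is_set())           # True: waiters were still unblocked
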
diff --git a/tools/gn/ninja_action_target_writer_unittest.cc b/tools/gn/ninja_action_target_writer_unittest.cc
index c1aa6f495b..87903e7e01 100644
--- a/tools/gn/ninja_action_target_writer_unittest.cc
+++ b/tools/gn/ninja_action_target_writer_unittest.cc
@@ -148,11 +148,11 @@ TEST(NinjaActionTargetWriter, ActionWithSources) {
// depending if we're on actual Windows or Linux pretending to be Windows.
const char expected_win[] =
"rule __foo_bar___rule\n"
- " command = C:/python/python.exe gyp-win-tool action-wrapper environment.x86 __foo_bar___rule.$unique_name.rsp\n"
+ " command = C$:/python/python.exe gyp-win-tool action-wrapper environment.x86 __foo_bar___rule.$unique_name.rsp\n"
" description = ACTION //foo:bar()\n"
" restat = 1\n"
" rspfile = __foo_bar___rule.$unique_name.rsp\n"
- " rspfile_content = C:/python/python.exe ../../foo/script.py\n"
+ " rspfile_content = C$:/python/python.exe ../../foo/script.py\n"
"\n"
"build foo.out: __foo_bar___rule | ../../foo/included.txt ../../foo/source.txt\n"
"\n"
@@ -237,12 +237,12 @@ TEST(NinjaActionTargetWriter, ForEach) {
// depending if we're on actual Windows or Linux pretending to be Windows.
const char expected_win[] =
"rule __foo_bar___rule\n"
- " command = C:/python/python.exe gyp-win-tool action-wrapper "
+ " command = C$:/python/python.exe gyp-win-tool action-wrapper "
"environment.x86 __foo_bar___rule.$unique_name.rsp\n"
" description = ACTION //foo:bar()\n"
" restat = 1\n"
" rspfile = __foo_bar___rule.$unique_name.rsp\n"
- " rspfile_content = C:/python/python.exe ../../foo/script.py -i "
+ " rspfile_content = C$:/python/python.exe ../../foo/script.py -i "
"${source} \"--out=foo$ bar${source_name_part}.o\"\n"
"\n"
"build input1.out: __foo_bar___rule ../../foo/input1.txt | "
@@ -341,12 +341,12 @@ TEST(NinjaActionTargetWriter, ForEachWithDepfile) {
// depending if we're on actual Windows or Linux pretending to be Windows.
const char expected_win[] =
"rule __foo_bar___rule\n"
- " command = C:/python/python.exe gyp-win-tool action-wrapper "
+ " command = C$:/python/python.exe gyp-win-tool action-wrapper "
"environment.x86 __foo_bar___rule.$unique_name.rsp\n"
" description = ACTION //foo:bar()\n"
" restat = 1\n"
" rspfile = __foo_bar___rule.$unique_name.rsp\n"
- " rspfile_content = C:/python/python.exe ../../foo/script.py -i "
+ " rspfile_content = C$:/python/python.exe ../../foo/script.py -i "
"${source} \"--out=foo$ bar${source_name_part}.o\"\n"
"\n"
"build gen/input1.d input1.out: __foo_bar___rule ../../foo/input1.txt"
diff --git a/tools/gn/ninja_build_writer.cc b/tools/gn/ninja_build_writer.cc
index 11b36e6b59..c0fc7fcfed 100644
--- a/tools/gn/ninja_build_writer.cc
+++ b/tools/gn/ninja_build_writer.cc
@@ -11,6 +11,7 @@
#include "base/file_util.h"
#include "base/path_service.h"
#include "base/process/process_handle.h"
+#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "build/build_config.h"
#include "tools/gn/build_settings.h"
@@ -179,30 +180,87 @@ void NinjaBuildWriter::WritePhonyAndAllRules() {
// Write phony rules for all uniquely-named targets in the default toolchain.
// Don't do other toolchains or we'll get naming conflicts, and if the name
- // isn't unique, also skip it.
+ // isn't unique, also skip it. The exception is for the toplevel targets
+ // which we also find.
std::map<std::string, int> small_name_count;
- for (size_t i = 0; i < default_toolchain_targets_.size(); i++)
- small_name_count[default_toolchain_targets_[i]->label().name()]++;
-
+ std::vector<const Target*> toplevel_targets;
for (size_t i = 0; i < default_toolchain_targets_.size(); i++) {
const Target* target = default_toolchain_targets_[i];
+ const Label& label = target->label();
+ small_name_count[label.name()]++;
+
+ // Look for targets with a name of the form
+ // dir = "//foo/", name = "foo"
+ // i.e. where the target name matches the top level directory. We will
+ // always write phony rules for these even if there is another target with
+ // the same short name.
+ const std::string& dir_string = label.dir().value();
+ if (dir_string.size() == label.name().size() + 3 && // Size matches.
+ dir_string[0] == '/' && dir_string[1] == '/' && // "//" at beginning.
+ dir_string[dir_string.size() - 1] == '/' && // "/" at end.
+ dir_string.compare(2, label.name().size(), label.name()) == 0)
+ toplevel_targets.push_back(target);
+ }
+ for (size_t i = 0; i < default_toolchain_targets_.size(); i++) {
+ const Target* target = default_toolchain_targets_[i];
+ const Label& label = target->label();
OutputFile target_file = helper_.GetTargetOutputFile(target);
- if (target_file.value() != target->label().name() &&
- small_name_count[default_toolchain_targets_[i]->label().name()] == 1) {
- out_ << "build " << target->label().name() << ": phony ";
- path_output_.WriteFile(out_, target_file);
- out_ << std::endl;
+
+ // Write the long name "foo/bar:baz" for the target "//foo/bar:baz".
+ std::string long_name = label.GetUserVisibleName(false);
+ base::TrimString(long_name, "/", &long_name);
+ WritePhonyRule(target, target_file, long_name);
+
+ // Write the directory name with no target name if they match
+ // (e.g. "//foo/bar:bar" -> "foo/bar").
+ if (FindLastDirComponent(label.dir()) == label.name()) {
+ std::string medium_name = DirectoryWithNoLastSlash(label.dir());
+ base::TrimString(medium_name, "/", &medium_name);
+ // That may have generated a name the same as the short name of the
+ // target which we already wrote.
+ if (medium_name != label.name())
+ WritePhonyRule(target, target_file, medium_name);
}
+ // Write short names for ones which are unique.
+ if (small_name_count[label.name()] == 1)
+ WritePhonyRule(target, target_file, label.name());
+
if (!all_rules.empty())
all_rules.append(" $\n ");
all_rules.append(target_file.value());
}
+ // Pick up phony rules for the toplevel targets with non-unique names (which
+ // would have been skipped in the above loop).
+ for (size_t i = 0; i < toplevel_targets.size(); i++) {
+ if (small_name_count[toplevel_targets[i]->label().name()] > 1) {
+ const Target* target = toplevel_targets[i];
+ WritePhonyRule(target, helper_.GetTargetOutputFile(target),
+ target->label().name());
+ }
+ }
+
if (!all_rules.empty()) {
out_ << "\nbuild all: phony " << all_rules << std::endl;
out_ << "default all" << std::endl;
}
}
+void NinjaBuildWriter::WritePhonyRule(const Target* target,
+ const OutputFile& target_file,
+ const std::string& phony_name) {
+ if (target_file.value() == phony_name)
+ return; // No need for a phony rule.
+
+ EscapeOptions ninja_escape;
+ ninja_escape.mode = ESCAPE_NINJA;
+
+ // Escape for special chars Ninja will handle.
+ std::string escaped = EscapeString(phony_name, ninja_escape, NULL);
+
+ out_ << "build " << escaped << ": phony ";
+ path_output_.WriteFile(out_, target_file);
+ out_ << std::endl;
+}
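
Taken together, a default-toolchain target can now be built under several phony names: the long 'dir:name' form, the bare directory when its last component matches the target name, and the short name when it is unambiguous, with top-level targets such as //base:base keeping the short alias even when the name is shared. A hypothetical Python sketch of the alias computation (labels are plain strings here, not gn Label objects):

    def phony_aliases(dir_string, name, short_name_count):
      aliases = [dir_string.strip('/') + ':' + name]         # e.g. foo/bar:bar
      if dir_string.rstrip('/').rsplit('/', 1)[-1] == name:
        medium = dir_string.strip('/')                        # e.g. foo/bar
        if medium != name:                                    # avoid duplicating the short name
          aliases.append(medium)
      is_toplevel = dir_string == '//' + name + '/'
      if short_name_count[name] == 1 or is_toplevel:
        aliases.append(name)
      return aliases

    counts = {'bar': 1, 'base': 2}
    print(phony_aliases('//foo/bar/', 'bar', counts))   # ['foo/bar:bar', 'foo/bar', 'bar']
    print(phony_aliases('//base/', 'base', counts))     # ['base:base', 'base']
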
diff --git a/tools/gn/ninja_build_writer.h b/tools/gn/ninja_build_writer.h
index 4c72ef9050..95674567ef 100644
--- a/tools/gn/ninja_build_writer.h
+++ b/tools/gn/ninja_build_writer.h
@@ -39,6 +39,9 @@ class NinjaBuildWriter {
void WriteSubninjas();
void WritePhonyAndAllRules();
+ void WritePhonyRule(const Target* target, const OutputFile& target_file,
+ const std::string& phony_name);
+
const BuildSettings* build_settings_;
std::vector<const Settings*> all_settings_;
std::vector<const Target*> default_toolchain_targets_;
diff --git a/tools/gn/parser.cc b/tools/gn/parser.cc
index f533542e71..2d5e8fe559 100644
--- a/tools/gn/parser.cc
+++ b/tools/gn/parser.cc
@@ -357,8 +357,17 @@ scoped_ptr<ListNode> Parser::ParseList(Token::Type stop_before,
scoped_ptr<ListNode> list(new ListNode);
list->set_begin_token(cur_token());
bool just_got_comma = false;
+ bool first_time = true;
while (!LookAhead(stop_before)) {
- just_got_comma = false;
+ if (!first_time) {
+ if (!just_got_comma) {
+ // Require commas separate things in lists.
+ *err_ = Err(cur_token(), "Expected comma between items.");
+ return scoped_ptr<ListNode>();
+ }
+ }
+ first_time = false;
+
// Why _OR? We're parsing things that are higher precedence than the ,
// that separates the items of the list. , should appear lower than
// boolean expressions (the lowest of which is OR), but above assignments.
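
Previously the loop simply reset just_got_comma each time round and accepted adjacent items, so 'foo(1 2)' parsed silently; now every item after the first must be preceded by a comma. A toy version of the changed loop (tokenization is simplified, and the existing trailing-comma rejection lives outside this loop in the real parser):

    def parse_list(tokens):
      items = []
      just_got_comma = False
      first_time = True
      i = 0
      while i < len(tokens) and tokens[i] != ')':
        if not first_time and not just_got_comma:
          raise SyntaxError('Expected comma between items.')
        first_time = False
        items.append(tokens[i])
        i += 1
        just_got_comma = i < len(tokens) and tokens[i] == ','
        if just_got_comma:
          i += 1
      return items

    print(parse_list(['1', ',', '2', ')']))   # ['1', '2']
    try:
      parse_list(['1', '2', ')'])
    except SyntaxError as e:
      print(e)                                # Expected comma between items.
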
diff --git a/tools/gn/parser_unittest.cc b/tools/gn/parser_unittest.cc
index adb8fdb4be..1205038a8b 100644
--- a/tools/gn/parser_unittest.cc
+++ b/tools/gn/parser_unittest.cc
@@ -125,6 +125,7 @@ TEST(Parser, FunctionCall) {
" LITERAL(1)\n"
" LITERAL(2)\n");
DoExpressionErrorTest("foo(1, 2,)", 1, 10);
+ DoExpressionErrorTest("foo(1 2)", 1, 7);
}
TEST(Parser, ParenExpression) {
diff --git a/tools/gn/secondary/sdch/BUILD.gn b/tools/gn/secondary/sdch/BUILD.gn
index 5e1340364c..73f3afb8d7 100644
--- a/tools/gn/secondary/sdch/BUILD.gn
+++ b/tools/gn/secondary/sdch/BUILD.gn
@@ -52,4 +52,11 @@ static_library("sdch") {
}
deps = [ "//third_party/zlib" ]
+
+ if (is_clang) {
+ cflags = [
+ # TODO(mostynb): remove this if open-vcdiff is ever updated for c++11:
+ "-Wno-deprecated-declarations",
+ ]
+ }
}
diff --git a/tools/gn/secondary/third_party/expat/BUILD.gn b/tools/gn/secondary/third_party/expat/BUILD.gn
deleted file mode 100644
index 814c336bef..0000000000
--- a/tools/gn/secondary/third_party/expat/BUILD.gn
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-if (is_linux) {
- config("expat_config") {
- libs = [ "expat" ]
- }
-
- group("expat") {
- direct_dependent_configs = [ ":expat_config" ]
- }
-
- # TODO(brettw) Android needs direct dependent includes of
- # <android_src>/external/expat/lib
-} else {
- config("expat_config") {
- include_dirs = [ "files/lib" ]
- defines = [ "XML_STATIC" ]
- }
-
- static_library("expat") {
- sources = [
- "files/lib/expat.h",
- "files/lib/xmlparse.c",
- "files/lib/xmlrole.c",
- "files/lib/xmltok.c",
- ]
-
- defines = [ "_LIB" ]
- if (is_win) {
- defines += [ "COMPILED_FROM_DSP" ]
- } else {
- defines += [ "HAVE_EXPAT_CONFIG_H" ]
- }
- }
-}
diff --git a/tools/gn/secondary/third_party/libevent/BUILD.gn b/tools/gn/secondary/third_party/libevent/BUILD.gn
deleted file mode 100644
index 0e3064a9b8..0000000000
--- a/tools/gn/secondary/third_party/libevent/BUILD.gn
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-static_library("libevent") {
- sources = [
- "buffer.c",
- "epoll.c",
- "epoll_sub.c",
- "evbuffer.c",
- "evdns.c",
- "event.c",
- "event_tagging.c",
- "evrpc.c",
- "evutil.c",
- "http.c",
- "kqueue.c",
- "log.c",
- "poll.c",
- "select.c",
- "signal.c",
- "strlcpy.c",
- ]
-
- defines = [ "HAVE_CONFIG_H" ]
-
- if (is_linux) {
- libs = [ "rt" ]
- } else {
- sources -= [ "epoll.c", "epoll_sub.c" ]
- }
-
- if (is_mac) {
- include_dirs = [ "mac" ]
- } else if (is_linux) {
- include_dirs = [ "linux" ]
- sources -= [ "kqueue.c" ]
- } else if (is_android) {
- include_dirs = [ "android" ]
- sources -= [ "kqueue.c" ]
- }
-
- configs -= [ "//build/config/compiler:chromium_code" ]
- configs += [ "//build/config/compiler:no_chromium_code" ]
-}
diff --git a/tools/gn/secondary/third_party/libxml/BUILD.gn b/tools/gn/secondary/third_party/libxml/BUILD.gn
deleted file mode 100644
index e3f26a16a0..0000000000
--- a/tools/gn/secondary/third_party/libxml/BUILD.gn
+++ /dev/null
@@ -1,190 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Define an "os_include" variable that points at the OS-specific generated
-# headers. These were generated by running the configure script offline.
-if (is_linux || is_android) {
- os_include = "linux"
-} else if (is_mac || is_ios) {
- os_include = "mac"
-} else if (is_win) {
- os_include = "win32"
-}
-
-config("libxml_config") {
- # Define LIBXML_STATIC as nothing to match how libxml.h (an internal header)
- # defines LIBXML_STATIC, otherwise we get the macro redefined warning from
- # GCC. ("defines" does "-DFOO" which defines the macro FOO as 1.)
- cflags = [ "-DLIBXML_STATIC=" ]
-
- include_dirs = [
- "src/include",
- "$os_include/include",
- ]
-}
-
-static_library("libxml") {
- output_name = "libxml2"
- sources = [
- "chromium/libxml_utils.h",
- "chromium/libxml_utils.cc",
- "linux/config.h",
- "linux/include/libxml/xmlversion.h",
- "mac/config.h",
- "mac/include/libxml/xmlversion.h",
- "src/include/libxml/c14n.h",
- "src/include/libxml/catalog.h",
- "src/include/libxml/chvalid.h",
- "src/include/libxml/debugXML.h",
- "src/include/libxml/dict.h",
- "src/include/libxml/DOCBparser.h",
- "src/include/libxml/encoding.h",
- "src/include/libxml/entities.h",
- "src/include/libxml/globals.h",
- "src/include/libxml/hash.h",
- "src/include/libxml/HTMLparser.h",
- "src/include/libxml/HTMLtree.h",
- "src/include/libxml/list.h",
- "src/include/libxml/nanoftp.h",
- "src/include/libxml/nanohttp.h",
- "src/include/libxml/parser.h",
- "src/include/libxml/parserInternals.h",
- "src/include/libxml/pattern.h",
- "src/include/libxml/relaxng.h",
- "src/include/libxml/SAX.h",
- "src/include/libxml/SAX2.h",
- "src/include/libxml/schemasInternals.h",
- "src/include/libxml/schematron.h",
- "src/include/libxml/threads.h",
- "src/include/libxml/tree.h",
- "src/include/libxml/uri.h",
- "src/include/libxml/valid.h",
- "src/include/libxml/xinclude.h",
- "src/include/libxml/xlink.h",
- "src/include/libxml/xmlautomata.h",
- "src/include/libxml/xmlerror.h",
- "src/include/libxml/xmlexports.h",
- "src/include/libxml/xmlIO.h",
- "src/include/libxml/xmlmemory.h",
- "src/include/libxml/xmlmodule.h",
- "src/include/libxml/xmlreader.h",
- "src/include/libxml/xmlregexp.h",
- "src/include/libxml/xmlsave.h",
- "src/include/libxml/xmlschemas.h",
- "src/include/libxml/xmlschemastypes.h",
- "src/include/libxml/xmlstring.h",
- "src/include/libxml/xmlunicode.h",
- "src/include/libxml/xmlwriter.h",
- "src/include/libxml/xpath.h",
- "src/include/libxml/xpathInternals.h",
- "src/include/libxml/xpointer.h",
- "src/include/win32config.h",
- "src/include/wsockcompat.h",
- "src/acconfig.h",
- "src/c14n.c",
- "src/catalog.c",
- "src/chvalid.c",
- "src/debugXML.c",
- "src/dict.c",
- "src/DOCBparser.c",
- "src/elfgcchack.h",
- "src/encoding.c",
- "src/entities.c",
- "src/error.c",
- "src/globals.c",
- "src/hash.c",
- "src/HTMLparser.c",
- "src/HTMLtree.c",
- "src/legacy.c",
- "src/libxml.h",
- "src/list.c",
- "src/nanoftp.c",
- "src/nanohttp.c",
- "src/parser.c",
- "src/parserInternals.c",
- "src/pattern.c",
- "src/relaxng.c",
- "src/SAX.c",
- "src/SAX2.c",
- "src/schematron.c",
- "src/threads.c",
- "src/tree.c",
- #"src/trio.c",
- #"src/trio.h",
- #"src/triodef.h",
- #"src/trionan.c",
- #"src/trionan.h",
- #"src/triop.h",
- #"src/triostr.c",
- #"src/triostr.h",
- "src/uri.c",
- "src/valid.c",
- "src/xinclude.c",
- "src/xlink.c",
- "src/xmlIO.c",
- "src/xmlmemory.c",
- "src/xmlmodule.c",
- "src/xmlreader.c",
- "src/xmlregexp.c",
- "src/xmlsave.c",
- "src/xmlschemas.c",
- "src/xmlschemastypes.c",
- "src/xmlstring.c",
- "src/xmlunicode.c",
- "src/xmlwriter.c",
- "src/xpath.c",
- "src/xpointer.c",
- "win32/config.h",
- "win32/include/libxml/xmlversion.h",
- ]
-
- configs -= [ "//build/config/compiler:chromium_code" ]
- configs += [ "//build/config/compiler:no_chromium_code" ]
-
- direct_dependent_configs = [ ":libxml_config" ]
- forward_dependent_configs_from = [ "//third_party/icu:icuuc" ]
-
- deps = [
- "//third_party/icu:icuuc",
- "//third_party/zlib",
- ]
-
- if (is_win) {
- cflags_c = [
- "/wd4101", # Unreferenced local variable.
- ]
- } else if (is_mac || is_android) {
- # http://www.xmlsoft.org/threads.html says that this is required when using
- # libxml from several threads, which can possibly happen in chrome. On
- # linux, this is picked up by transitivity from pkg-config output from
- # build/linux/system.gyp.
- defines = [ "_REENTRANT" ]
- }
-
- if (is_clang) {
- cflags = [
- # libxml passes `const unsigned char*` through `const char*`.
- "-Wno-pointer-sign",
-
- # pattern.c and uri.c both have an intentional `for (...);` /
- # `while(...);` loop. I submitted a patch to move the `;` to its own
- # line, but until that's landed suppress the warning:
- "-Wno-empty-body",
-
- # See http://crbug.com/138571#c8
- "-Wno-ignored-attributes",
- ]
- if (is_mac) {
- # Mac Clang warnings.
- cflags += [
- # debugXML.c compares array 'arg' to NULL.
- "-Wno-tautological-pointer-compare",
- ]
- }
- }
-
- include_dirs = [
- "$os_include",
- ]
-}
diff --git a/tools/gn/secondary/third_party/mach_override/BUILD.gn b/tools/gn/secondary/third_party/mach_override/BUILD.gn
deleted file mode 100644
index f50d3cc6c1..0000000000
--- a/tools/gn/secondary/third_party/mach_override/BUILD.gn
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-static_library("libudis86") {
- sources = [
- "libudis86/decode.c",
- "libudis86/decode.h",
- "libudis86/extern.h",
- "libudis86/input.c",
- "libudis86/input.h",
- "libudis86/itab.c",
- "libudis86/itab.h",
- "libudis86/syn-att.c",
- "libudis86/syn-intel.c",
- "libudis86/syn.c",
- "libudis86/syn.h",
- "libudis86/types.h",
- "libudis86/udint.h",
- "libudis86/udis86.c",
- "udis86.h",
- ]
- defines = [
- "HAVE_ASSERT_H",
- "HAVE_STRING_H",
- ]
-}
-
-static_library("mach_override") {
- sources = [
- "mach_override.c",
- "mach_override.h",
- ]
- deps = [
- ":libudis86",
- ]
-}
diff --git a/tools/gn/secondary/third_party/snappy/BUILD.gn b/tools/gn/secondary/third_party/snappy/BUILD.gn
deleted file mode 100644
index 705cb8aa2b..0000000000
--- a/tools/gn/secondary/third_party/snappy/BUILD.gn
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-config("snappy_config") {
- include_dirs = [
- "src",
- ]
-
- # These OS-specific generated headers were made by running the configure
- # script offline.
- if (is_win) {
- include_dirs += [ "win32" ]
- } else if (is_mac) {
- include_dirs += [ "mac" ]
- } else {
- include_dirs += [ "linux" ]
- }
-}
-
-static_library("snappy") {
- sources = [
- "src/snappy-internal.h",
- "src/snappy-sinksource.cc",
- "src/snappy-sinksource.h",
- "src/snappy-stubs-internal.cc",
- "src/snappy-stubs-internal.h",
- "src/snappy.cc",
- "src/snappy.h",
- ]
-
- configs -= [ "//build/config/compiler:chromium_code" ]
- configs += [ "//build/config/compiler:no_chromium_code" ]
- direct_dependent_configs = [ ":snappy_config" ]
-
- if (is_clang) {
- # snappy-stubs-internal.h unapologetically has: using namespace std
- # https://code.google.com/p/snappy/issues/detail?id=70
- configs -= [ "//build/config/clang:extra_warnings" ]
- }
-
- if (is_win) {
- # https://code.google.com/p/snappy/issues/detail?id=75
- cflags = [ "/wd4267" ] # Conversion from size_t to 'type'.
- }
-}
diff --git a/tools/gn/secondary/third_party/wtl/BUILD.gn b/tools/gn/secondary/third_party/wtl/BUILD.gn
deleted file mode 100644
index 7ebe9d99d2..0000000000
--- a/tools/gn/secondary/third_party/wtl/BUILD.gn
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-config("wtl_includes") {
- include_dirs = [ "include" ]
-}
-
-# WTL is only header files so we use a "group" target type which doesn't
-# actually generate anything linkable, and inject the required config for
-# making the include directories work.
-group("wtl") {
- all_dependent_configs = ":wtl_includes"
-}
diff --git a/tools/gn/secondary/third_party/zlib/BUILD.gn b/tools/gn/secondary/third_party/zlib/BUILD.gn
deleted file mode 100644
index 323ff86e9b..0000000000
--- a/tools/gn/secondary/third_party/zlib/BUILD.gn
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright (c) 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-config("zlib_config") {
- include_dirs = [ "." ]
-}
-
-static_library("zlib") {
- if (!is_win) {
- # Don't stomp on "libzlib" on other platforms.
- output_name = "chrome_zlib"
- }
-
- sources = [
- "adler32.c",
- "compress.c",
- "crc32.c",
- "crc32.h",
- "deflate.c",
- "deflate.h",
- "gzclose.c",
- "gzguts.h",
- "gzlib.c",
- "gzread.c",
- "gzwrite.c",
- "infback.c",
- "inffast.c",
- "inffast.h",
- "inffixed.h",
- "inflate.c",
- "inflate.h",
- "inftrees.c",
- "inftrees.h",
- "mozzconf.h",
- "trees.c",
- "trees.h",
- "uncompr.c",
- "zconf.h",
- "zlib.h",
- "zutil.c",
- "zutil.h",
- ]
-
- configs -= [ "//build/config/compiler:chromium_code" ]
- configs += [ "//build/config/compiler:no_chromium_code" ]
-
- direct_dependent_configs = [ ":zlib_config" ]
-}
-
-static_library("minizip") {
- sources = [
- "contrib/minizip/ioapi.c",
- "contrib/minizip/ioapi.h",
- "contrib/minizip/iowin32.c",
- "contrib/minizip/iowin32.h",
- "contrib/minizip/unzip.c",
- "contrib/minizip/unzip.h",
- "contrib/minizip/zip.c",
- "contrib/minizip/zip.h",
- ]
-
- if (!is_win) {
- sources -= [
- "contrib/minizip/iowin32.c",
- "contrib/minizip/iowin32.h",
- ]
- }
- if (is_mac || is_ios || is_android) {
- # Mac, Android and the BSDs don't have fopen64, ftello64, or fseeko64. We
- # use fopen, ftell, and fseek instead on these systems.
- defines = [ "USE_FILE32API" ]
- }
-
- if (is_clang) {
- # zlib uses `if ((a == b))` for some reason.
- cflags = [ "-Wno-parentheses-equality" ]
- }
-
- deps = [ ":zlib" ]
-
- configs -= [ "//build/config/compiler:chromium_code" ]
- configs += [ "//build/config/compiler:no_chromium_code" ]
- direct_dependent_configs = [ ":zlib_config" ]
-}
-
-if (!is_android) {
- static_library("zip") {
- sources = [
- "google/zip.cc",
- "google/zip.h",
- "google/zip_internal.cc",
- "google/zip_internal.h",
- "google/zip_reader.cc",
- "google/zip_reader.h",
- ]
- deps = [
- ":minizip",
- "//base",
- ]
- }
-}
diff --git a/tools/gn/secondary/tools/grit/grit_rule.gni b/tools/gn/secondary/tools/grit/grit_rule.gni
index 3836e718f5..789e40ea94 100644
--- a/tools/gn/secondary/tools/grit/grit_rule.gni
+++ b/tools/gn/secondary/tools/grit/grit_rule.gni
@@ -38,7 +38,7 @@ template("grit") {
[ "--inputs", source_path, "-f", resource_ids] + grit_flags, "list lines")
# The inputs are relative to the current (build) directory, rebase to
# the current one.
- grit_inputs = rebase_path(grit_inputs_build_rel, root_build_dir)
+ grit_inputs = rebase_path(grit_inputs_build_rel, ".", root_build_dir)
grit_outputs_build_rel = exec_script(grit_info_script,
[ "--outputs", "$output_dir", source_path, "-f", resource_ids ] +
diff --git a/tools/gn/secondary/v8/BUILD.gn b/tools/gn/secondary/v8/BUILD.gn
index f704b4f29f..0704f3c70c 100644
--- a/tools/gn/secondary/v8/BUILD.gn
+++ b/tools/gn/secondary/v8/BUILD.gn
@@ -21,668 +21,670 @@ config("internal_config") {
}
}
-# TODO(brettw) Fix v8_base on Android and re-enable.
-if (is_android) {
- source_set("v8_base") {
- }
-} else {
-
+# TODO(jochen) Figure out how to maintain this file without blocking v8 rolls.
source_set("v8_base") {
- sources = [
- "src/accessors.cc",
- "src/accessors.h",
- "src/allocation.cc",
- "src/allocation.h",
- "src/allocation-site-scopes.cc",
- "src/allocation-site-scopes.h",
- "src/allocation-tracker.cc",
- "src/allocation-tracker.h",
- "src/api.cc",
- "src/api.h",
- "src/arguments.cc",
- "src/arguments.h",
- "src/assembler.cc",
- "src/assembler.h",
- "src/assert-scope.h",
- "src/assert-scope.cc",
- "src/ast.cc",
- "src/ast.h",
- "src/atomicops.h",
- "src/atomicops_internals_x86_gcc.cc",
- "src/bignum-dtoa.cc",
- "src/bignum-dtoa.h",
- "src/bignum.cc",
- "src/bignum.h",
- "src/bootstrapper.cc",
- "src/bootstrapper.h",
- "src/builtins.cc",
- "src/builtins.h",
- "src/bytecodes-irregexp.h",
- "src/cached-powers.cc",
- "src/cached-powers.h",
- "src/char-predicates-inl.h",
- "src/char-predicates.h",
- "src/checks.cc",
- "src/checks.h",
- "src/circular-queue-inl.h",
- "src/circular-queue.h",
- "src/code-stubs.cc",
- "src/code-stubs.h",
- "src/code-stubs-hydrogen.cc",
- "src/code.h",
- "src/codegen.cc",
- "src/codegen.h",
- "src/compilation-cache.cc",
- "src/compilation-cache.h",
- "src/compiler.cc",
- "src/compiler.h",
- "src/contexts.cc",
- "src/contexts.h",
- "src/conversions-inl.h",
- "src/conversions.cc",
- "src/conversions.h",
- "src/counters.cc",
- "src/counters.h",
- "src/cpu-profiler-inl.h",
- "src/cpu-profiler.cc",
- "src/cpu-profiler.h",
- "src/cpu.cc",
- "src/cpu.h",
- "src/data-flow.cc",
- "src/data-flow.h",
- "src/date.cc",
- "src/date.h",
- "src/dateparser-inl.h",
- "src/dateparser.cc",
- "src/dateparser.h",
- "src/debug-agent.cc",
- "src/debug-agent.h",
- "src/debug.cc",
- "src/debug.h",
- "src/deoptimizer.cc",
- "src/deoptimizer.h",
- "src/disasm.h",
- "src/disassembler.cc",
- "src/disassembler.h",
- "src/diy-fp.cc",
- "src/diy-fp.h",
- "src/double.h",
- "src/dtoa.cc",
- "src/dtoa.h",
- "src/effects.h",
- "src/elements-kind.cc",
- "src/elements-kind.h",
- "src/elements.cc",
- "src/elements.h",
- "src/execution.cc",
- "src/execution.h",
- "src/extensions/externalize-string-extension.cc",
- "src/extensions/externalize-string-extension.h",
- "src/extensions/free-buffer-extension.cc",
- "src/extensions/free-buffer-extension.h",
- "src/extensions/gc-extension.cc",
- "src/extensions/gc-extension.h",
- "src/extensions/statistics-extension.cc",
- "src/extensions/statistics-extension.h",
- "src/extensions/trigger-failure-extension.cc",
- "src/extensions/trigger-failure-extension.h",
- "src/factory.cc",
- "src/factory.h",
- "src/fast-dtoa.cc",
- "src/fast-dtoa.h",
- "src/feedback-slots.h",
- "src/fixed-dtoa.cc",
- "src/fixed-dtoa.h",
- "src/flag-definitions.h",
- "src/flags.cc",
- "src/flags.h",
- "src/frames-inl.h",
- "src/frames.cc",
- "src/frames.h",
- "src/full-codegen.cc",
- "src/full-codegen.h",
- "src/func-name-inferrer.cc",
- "src/func-name-inferrer.h",
- "src/gdb-jit.cc",
- "src/gdb-jit.h",
- "src/global-handles.cc",
- "src/global-handles.h",
- "src/globals.h",
- "src/handles-inl.h",
- "src/handles.cc",
- "src/handles.h",
- "src/hashmap.h",
- "src/heap-inl.h",
- "src/heap-profiler.cc",
- "src/heap-profiler.h",
- "src/heap-snapshot-generator-inl.h",
- "src/heap-snapshot-generator.cc",
- "src/heap-snapshot-generator.h",
- "src/heap.cc",
- "src/heap.h",
- "src/hydrogen-alias-analysis.h",
- "src/hydrogen-bce.cc",
- "src/hydrogen-bce.h",
- "src/hydrogen-bch.cc",
- "src/hydrogen-bch.h",
- "src/hydrogen-canonicalize.cc",
- "src/hydrogen-canonicalize.h",
- "src/hydrogen-check-elimination.cc",
- "src/hydrogen-check-elimination.h",
- "src/hydrogen-dce.cc",
- "src/hydrogen-dce.h",
- "src/hydrogen-dehoist.cc",
- "src/hydrogen-dehoist.h",
- "src/hydrogen-environment-liveness.cc",
- "src/hydrogen-environment-liveness.h",
- "src/hydrogen-escape-analysis.cc",
- "src/hydrogen-escape-analysis.h",
- "src/hydrogen-flow-engine.h",
- "src/hydrogen-instructions.cc",
- "src/hydrogen-instructions.h",
- "src/hydrogen.cc",
- "src/hydrogen.h",
- "src/hydrogen-gvn.cc",
- "src/hydrogen-gvn.h",
- "src/hydrogen-infer-representation.cc",
- "src/hydrogen-infer-representation.h",
- "src/hydrogen-infer-types.cc",
- "src/hydrogen-infer-types.h",
- "src/hydrogen-load-elimination.cc",
- "src/hydrogen-load-elimination.h",
- "src/hydrogen-mark-deoptimize.cc",
- "src/hydrogen-mark-deoptimize.h",
- "src/hydrogen-mark-unreachable.cc",
- "src/hydrogen-mark-unreachable.h",
- "src/hydrogen-osr.cc",
- "src/hydrogen-osr.h",
- "src/hydrogen-range-analysis.cc",
- "src/hydrogen-range-analysis.h",
- "src/hydrogen-redundant-phi.cc",
- "src/hydrogen-redundant-phi.h",
- "src/hydrogen-removable-simulates.cc",
- "src/hydrogen-removable-simulates.h",
- "src/hydrogen-representation-changes.cc",
- "src/hydrogen-representation-changes.h",
- "src/hydrogen-sce.cc",
- "src/hydrogen-sce.h",
- "src/hydrogen-store-elimination.cc",
- "src/hydrogen-store-elimination.h",
- "src/hydrogen-uint32-analysis.cc",
- "src/hydrogen-uint32-analysis.h",
- "src/i18n.cc",
- "src/i18n.h",
- "src/icu_util.cc",
- "src/icu_util.h",
- "src/ic-inl.h",
- "src/ic.cc",
- "src/ic.h",
- "src/incremental-marking.cc",
- "src/incremental-marking.h",
- "src/interface.cc",
- "src/interface.h",
- "src/interpreter-irregexp.cc",
- "src/interpreter-irregexp.h",
- "src/isolate.cc",
- "src/isolate.h",
- "src/json-parser.h",
- "src/json-stringifier.h",
- "src/jsregexp-inl.h",
- "src/jsregexp.cc",
- "src/jsregexp.h",
- "src/lazy-instance.h",
- # TODO(jochen): move libplatform/ files to their own target.
- "src/libplatform/default-platform.cc",
- "src/libplatform/default-platform.h",
- "src/libplatform/task-queue.cc",
- "src/libplatform/task-queue.h",
- "src/libplatform/worker-thread.cc",
- "src/libplatform/worker-thread.h",
- "src/list-inl.h",
- "src/list.h",
- "src/lithium-allocator-inl.h",
- "src/lithium-allocator.cc",
- "src/lithium-allocator.h",
- "src/lithium-codegen.cc",
- "src/lithium-codegen.h",
- "src/lithium.cc",
- "src/lithium.h",
- "src/liveedit.cc",
- "src/liveedit.h",
- "src/log-inl.h",
- "src/log-utils.cc",
- "src/log-utils.h",
- "src/log.cc",
- "src/log.h",
- "src/macro-assembler.h",
- "src/mark-compact.cc",
- "src/mark-compact.h",
- "src/messages.cc",
- "src/messages.h",
- "src/msan.h",
- "src/natives.h",
- "src/objects-debug.cc",
- "src/objects-inl.h",
- "src/objects-printer.cc",
- "src/objects-visiting.cc",
- "src/objects-visiting.h",
- "src/objects.cc",
- "src/objects.h",
- "src/once.cc",
- "src/once.h",
- "src/optimizing-compiler-thread.h",
- "src/optimizing-compiler-thread.cc",
- "src/parser.cc",
- "src/parser.h",
- "src/platform/elapsed-timer.h",
- "src/platform/time.cc",
- "src/platform/time.h",
- "src/platform.h",
- "src/platform/condition-variable.cc",
- "src/platform/condition-variable.h",
- "src/platform/mutex.cc",
- "src/platform/mutex.h",
- "src/platform/semaphore.cc",
- "src/platform/semaphore.h",
- "src/platform/socket.cc",
- "src/platform/socket.h",
- "src/preparse-data-format.h",
- "src/preparse-data.cc",
- "src/preparse-data.h",
- "src/preparser.cc",
- "src/preparser.h",
- "src/prettyprinter.cc",
- "src/prettyprinter.h",
- "src/profile-generator-inl.h",
- "src/profile-generator.cc",
- "src/profile-generator.h",
- "src/property-details.h",
- "src/property.cc",
- "src/property.h",
- "src/regexp-macro-assembler-irregexp-inl.h",
- "src/regexp-macro-assembler-irregexp.cc",
- "src/regexp-macro-assembler-irregexp.h",
- "src/regexp-macro-assembler-tracer.cc",
- "src/regexp-macro-assembler-tracer.h",
- "src/regexp-macro-assembler.cc",
- "src/regexp-macro-assembler.h",
- "src/regexp-stack.cc",
- "src/regexp-stack.h",
- "src/rewriter.cc",
- "src/rewriter.h",
- "src/runtime-profiler.cc",
- "src/runtime-profiler.h",
- "src/runtime.cc",
- "src/runtime.h",
- "src/safepoint-table.cc",
- "src/safepoint-table.h",
- "src/sampler.cc",
- "src/sampler.h",
- "src/scanner-character-streams.cc",
- "src/scanner-character-streams.h",
- "src/scanner.cc",
- "src/scanner.h",
- "src/scopeinfo.cc",
- "src/scopeinfo.h",
- "src/scopes.cc",
- "src/scopes.h",
- "src/serialize.cc",
- "src/serialize.h",
- "src/small-pointer-list.h",
- "src/smart-pointers.h",
- "src/snapshot-common.cc",
- "src/snapshot.h",
- "src/spaces-inl.h",
- "src/spaces.cc",
- "src/spaces.h",
- "src/store-buffer-inl.h",
- "src/store-buffer.cc",
- "src/store-buffer.h",
- "src/string-search.cc",
- "src/string-search.h",
- "src/string-stream.cc",
- "src/string-stream.h",
- "src/strtod.cc",
- "src/strtod.h",
- "src/stub-cache.cc",
- "src/stub-cache.h",
- "src/sweeper-thread.h",
- "src/sweeper-thread.cc",
- "src/token.cc",
- "src/token.h",
- "src/transitions-inl.h",
- "src/transitions.cc",
- "src/transitions.h",
- "src/type-info.cc",
- "src/type-info.h",
- "src/types-inl.h",
- "src/types.cc",
- "src/types.h",
- "src/typing.cc",
- "src/typing.h",
- "src/unbound-queue-inl.h",
- "src/unbound-queue.h",
- "src/unicode-inl.h",
- "src/unicode.cc",
- "src/unicode.h",
- "src/unique.h",
- "src/uri.h",
- "src/utils-inl.h",
- "src/utils.cc",
- "src/utils.h",
- "src/utils/random-number-generator.cc",
- "src/utils/random-number-generator.h",
- "src/v8-counters.cc",
- "src/v8-counters.h",
- "src/v8.cc",
- "src/v8.h",
- "src/v8checks.h",
- "src/v8conversions.cc",
- "src/v8conversions.h",
- "src/v8globals.h",
- "src/v8memory.h",
- "src/v8threads.cc",
- "src/v8threads.h",
- "src/v8utils.cc",
- "src/v8utils.h",
- "src/variables.cc",
- "src/variables.h",
- "src/version.cc",
- "src/version.h",
- "src/vm-state-inl.h",
- "src/vm-state.h",
- "src/zone-inl.h",
- "src/zone.cc",
- "src/zone.h",
- ]
-
- if (cpu_arch == "x86") {
- # TODO(brettw) the GYP file has
- # or v8_target_arch=="mac" or OS=="mac"
- # which I don't understand.
- sources += [
- "src/ia32/assembler-ia32-inl.h",
- "src/ia32/assembler-ia32.cc",
- "src/ia32/assembler-ia32.h",
- "src/ia32/builtins-ia32.cc",
- "src/ia32/code-stubs-ia32.cc",
- "src/ia32/code-stubs-ia32.h",
- "src/ia32/codegen-ia32.cc",
- "src/ia32/codegen-ia32.h",
- "src/ia32/cpu-ia32.cc",
- "src/ia32/debug-ia32.cc",
- "src/ia32/deoptimizer-ia32.cc",
- "src/ia32/disasm-ia32.cc",
- "src/ia32/frames-ia32.cc",
- "src/ia32/frames-ia32.h",
- "src/ia32/full-codegen-ia32.cc",
- "src/ia32/ic-ia32.cc",
- "src/ia32/lithium-codegen-ia32.cc",
- "src/ia32/lithium-codegen-ia32.h",
- "src/ia32/lithium-gap-resolver-ia32.cc",
- "src/ia32/lithium-gap-resolver-ia32.h",
- "src/ia32/lithium-ia32.cc",
- "src/ia32/lithium-ia32.h",
- "src/ia32/macro-assembler-ia32.cc",
- "src/ia32/macro-assembler-ia32.h",
- "src/ia32/regexp-macro-assembler-ia32.cc",
- "src/ia32/regexp-macro-assembler-ia32.h",
- "src/ia32/stub-cache-ia32.cc",
- ]
- } else if (cpu_arch == "x64") {
- sources += [
- "src/x64/assembler-x64-inl.h",
- "src/x64/assembler-x64.cc",
- "src/x64/assembler-x64.h",
- "src/x64/builtins-x64.cc",
- "src/x64/code-stubs-x64.cc",
- "src/x64/code-stubs-x64.h",
- "src/x64/codegen-x64.cc",
- "src/x64/codegen-x64.h",
- "src/x64/cpu-x64.cc",
- "src/x64/debug-x64.cc",
- "src/x64/deoptimizer-x64.cc",
- "src/x64/disasm-x64.cc",
- "src/x64/frames-x64.cc",
- "src/x64/frames-x64.h",
- "src/x64/full-codegen-x64.cc",
- "src/x64/ic-x64.cc",
- "src/x64/lithium-codegen-x64.cc",
- "src/x64/lithium-codegen-x64.h",
- "src/x64/lithium-gap-resolver-x64.cc",
- "src/x64/lithium-gap-resolver-x64.h",
- "src/x64/lithium-x64.cc",
- "src/x64/lithium-x64.h",
- "src/x64/macro-assembler-x64.cc",
- "src/x64/macro-assembler-x64.h",
- "src/x64/regexp-macro-assembler-x64.cc",
- "src/x64/regexp-macro-assembler-x64.h",
- "src/x64/stub-cache-x64.cc",
- ]
- } else if (cpu_arch == "arm") {
- sources += [
- "src/arm/assembler-arm-inl.h",
- "src/arm/assembler-arm.cc",
- "src/arm/assembler-arm.h",
- "src/arm/builtins-arm.cc",
- "src/arm/code-stubs-arm.cc",
- "src/arm/code-stubs-arm.h",
- "src/arm/codegen-arm.cc",
- "src/arm/codegen-arm.h",
- "src/arm/constants-arm.h",
- "src/arm/constants-arm.cc",
- "src/arm/cpu-arm.cc",
- "src/arm/debug-arm.cc",
- "src/arm/deoptimizer-arm.cc",
- "src/arm/disasm-arm.cc",
- "src/arm/frames-arm.cc",
- "src/arm/frames-arm.h",
- "src/arm/full-codegen-arm.cc",
- "src/arm/ic-arm.cc",
- "src/arm/lithium-arm.cc",
- "src/arm/lithium-arm.h",
- "src/arm/lithium-codegen-arm.cc",
- "src/arm/lithium-codegen-arm.h",
- "src/arm/lithium-gap-resolver-arm.cc",
- "src/arm/lithium-gap-resolver-arm.h",
- "src/arm/macro-assembler-arm.cc",
- "src/arm/macro-assembler-arm.h",
- "src/arm/regexp-macro-assembler-arm.cc",
- "src/arm/regexp-macro-assembler-arm.h",
- "src/arm/simulator-arm.cc",
- "src/arm/stub-cache-arm.cc",
- ]
- } else if (cpu_arch == "arm64") {
- sources += [
- "src/arm64/assembler-arm64.cc",
- "src/arm64/assembler-arm64.h",
- "src/arm64/assembler-arm64-inl.h",
- "src/arm64/builtins-arm64.cc",
- "src/arm64/codegen-arm64.cc",
- "src/arm64/codegen-arm64.h",
- "src/arm64/code-stubs-arm64.cc",
- "src/arm64/code-stubs-arm64.h",
- "src/arm64/constants-arm64.h",
- "src/arm64/cpu-arm64.cc",
- "src/arm64/cpu-arm64.h",
- "src/arm64/debug-arm64.cc",
- "src/arm64/decoder-arm64.cc",
- "src/arm64/decoder-arm64.h",
- "src/arm64/decoder-arm64-inl.h",
- "src/arm64/deoptimizer-arm64.cc",
- "src/arm64/disasm-arm64.cc",
- "src/arm64/disasm-arm64.h",
- "src/arm64/frames-arm64.cc",
- "src/arm64/frames-arm64.h",
- "src/arm64/full-codegen-arm64.cc",
- "src/arm64/ic-arm64.cc",
- "src/arm64/instructions-arm64.cc",
- "src/arm64/instructions-arm64.h",
- "src/arm64/instrument-arm64.cc",
- "src/arm64/instrument-arm64.h",
- "src/arm64/lithium-arm64.cc",
- "src/arm64/lithium-arm64.h",
- "src/arm64/lithium-codegen-arm64.cc",
- "src/arm64/lithium-codegen-arm64.h",
- "src/arm64/lithium-gap-resolver-arm64.cc",
- "src/arm64/lithium-gap-resolver-arm64.h",
- "src/arm64/macro-assembler-arm64.cc",
- "src/arm64/macro-assembler-arm64.h",
- "src/arm64/macro-assembler-arm64-inl.h",
- "src/arm64/regexp-macro-assembler-arm64.cc",
- "src/arm64/regexp-macro-assembler-arm64.h",
- "src/arm64/simulator-arm64.cc",
- "src/arm64/simulator-arm64.h",
- "src/arm64/stub-cache-arm64.cc",
- "src/arm64/utils-arm64.cc",
- "src/arm64/utils-arm64.h",
- ]
- } else if (cpu_arch == "mipsel") {
- sources += [
- "src/mips/assembler-mips.cc",
- "src/mips/assembler-mips.h",
- "src/mips/assembler-mips-inl.h",
- "src/mips/builtins-mips.cc",
- "src/mips/codegen-mips.cc",
- "src/mips/codegen-mips.h",
- "src/mips/code-stubs-mips.cc",
- "src/mips/code-stubs-mips.h",
- "src/mips/constants-mips.cc",
- "src/mips/constants-mips.h",
- "src/mips/cpu-mips.cc",
- "src/mips/debug-mips.cc",
- "src/mips/deoptimizer-mips.cc",
- "src/mips/disasm-mips.cc",
- "src/mips/frames-mips.cc",
- "src/mips/frames-mips.h",
- "src/mips/full-codegen-mips.cc",
- "src/mips/ic-mips.cc",
- "src/mips/lithium-codegen-mips.cc",
- "src/mips/lithium-codegen-mips.h",
- "src/mips/lithium-gap-resolver-mips.cc",
- "src/mips/lithium-gap-resolver-mips.h",
- "src/mips/lithium-mips.cc",
- "src/mips/lithium-mips.h",
- "src/mips/macro-assembler-mips.cc",
- "src/mips/macro-assembler-mips.h",
- "src/mips/regexp-macro-assembler-mips.cc",
- "src/mips/regexp-macro-assembler-mips.h",
- "src/mips/simulator-mips.cc",
- "src/mips/stub-cache-mips.cc",
- ]
- }
-
- configs += [ ":internal_config" ]
-
- defines = []
- deps = []
-
- if (is_posix) {
- sources += [
- "src/platform-posix.cc"
- ]
- }
-
- if (is_linux) {
- sources += [
- "src/platform-linux.cc"
- ]
-
- # TODO(brettw)
- # 'conditions': [
- # ['v8_compress_startup_data=="bz2"', {
- # 'libraries': [
- # '-lbz2',
- # ]
- # }],
- # ],
-
- libs = [ "rt" ]
- } else if (is_android) {
- # TODO(brettW) OS=="android" condition from tools/gyp/v8.gyp
- } else if (is_mac) {
- sources += [ "src/platform-macoscc" ]
- } else if (is_win) {
- sources += [
- "src/platform-win32.cc",
- "src/win32-math.cc",
- "src/win32-math.h",
- ]
-
- defines += [ "_CRT_RAND_S" ] # for rand_s()
-
- libs = [ "winmm.lib", "ws2_32.lib" ]
- }
-
-
- if (v8_enable_i18n_support) {
- deps += [ "//third_party/icu" ]
- if (is_win) {
- deps += [ "//third_party/icu:icudata" ]
- }
- } else {
- sources -= [
- "src/i18n.cc",
- "src/i18n.h",
- ]
- }
-
- # TODO(brettw) other conditions from v8.gyp
- # TODO(brettw) icu_use_data_file_flag
-}
-
-action("js2c") {
- script = "tools/js2c.py"
-
- # The script depends on this other script, this rule causes a rebuild if it
- # changes.
- source_prereqs = [ "tools/jsmin.py" ]
-
- sources = [
- "src/runtime.js",
- "src/v8natives.js",
- "src/array.js",
- "src/string.js",
- "src/uri.js",
- "src/math.js",
- "src/messages.js",
- "src/apinatives.js",
- "src/debug-debugger.js",
- "src/mirror-debugger.js",
- "src/liveedit-debugger.js",
- "src/date.js",
- "src/json.js",
- "src/regexp.js",
- "src/arraybuffer.js",
- "src/typedarray.js",
- "src/object-observe.js",
- "src/macros.py",
- ]
-
- outputs = [
- "$target_gen_dir/libraries.cc"
- ]
-
- if (v8_enable_i18n_support) {
- sources += [ "src/i18n.js" ]
- }
-
- args =
- rebase_path(outputs, root_build_dir) +
- [ "CORE", v8_compress_startup_data ] +
- rebase_path(sources, root_build_dir)
}
-
-source_set("v8_nosnapshot") {
- visibility = ":*" # Only targets in this file can depend on this.
-
- sources = [
-
- ]
-
- configs += [ ":internal_config" ]
-}
-
+#if (is_android) {
+# source_set("v8_base") {
+# }
+#} else {
+#
+#source_set("v8_base") {
+# sources = [
+# "src/accessors.cc",
+# "src/accessors.h",
+# "src/allocation.cc",
+# "src/allocation.h",
+# "src/allocation-site-scopes.cc",
+# "src/allocation-site-scopes.h",
+# "src/allocation-tracker.cc",
+# "src/allocation-tracker.h",
+# "src/api.cc",
+# "src/api.h",
+# "src/arguments.cc",
+# "src/arguments.h",
+# "src/assembler.cc",
+# "src/assembler.h",
+# "src/assert-scope.h",
+# "src/assert-scope.cc",
+# "src/ast.cc",
+# "src/ast.h",
+# "src/atomicops.h",
+# "src/atomicops_internals_x86_gcc.cc",
+# "src/bignum-dtoa.cc",
+# "src/bignum-dtoa.h",
+# "src/bignum.cc",
+# "src/bignum.h",
+# "src/bootstrapper.cc",
+# "src/bootstrapper.h",
+# "src/builtins.cc",
+# "src/builtins.h",
+# "src/bytecodes-irregexp.h",
+# "src/cached-powers.cc",
+# "src/cached-powers.h",
+# "src/char-predicates-inl.h",
+# "src/char-predicates.h",
+# "src/checks.cc",
+# "src/checks.h",
+# "src/circular-queue-inl.h",
+# "src/circular-queue.h",
+# "src/code-stubs.cc",
+# "src/code-stubs.h",
+# "src/code-stubs-hydrogen.cc",
+# "src/code.h",
+# "src/codegen.cc",
+# "src/codegen.h",
+# "src/compilation-cache.cc",
+# "src/compilation-cache.h",
+# "src/compiler.cc",
+# "src/compiler.h",
+# "src/contexts.cc",
+# "src/contexts.h",
+# "src/conversions-inl.h",
+# "src/conversions.cc",
+# "src/conversions.h",
+# "src/counters.cc",
+# "src/counters.h",
+# "src/cpu-profiler-inl.h",
+# "src/cpu-profiler.cc",
+# "src/cpu-profiler.h",
+# "src/cpu.cc",
+# "src/cpu.h",
+# "src/data-flow.cc",
+# "src/data-flow.h",
+# "src/date.cc",
+# "src/date.h",
+# "src/dateparser-inl.h",
+# "src/dateparser.cc",
+# "src/dateparser.h",
+# "src/debug-agent.cc",
+# "src/debug-agent.h",
+# "src/debug.cc",
+# "src/debug.h",
+# "src/deoptimizer.cc",
+# "src/deoptimizer.h",
+# "src/disasm.h",
+# "src/disassembler.cc",
+# "src/disassembler.h",
+# "src/diy-fp.cc",
+# "src/diy-fp.h",
+# "src/double.h",
+# "src/dtoa.cc",
+# "src/dtoa.h",
+# "src/effects.h",
+# "src/elements-kind.cc",
+# "src/elements-kind.h",
+# "src/elements.cc",
+# "src/elements.h",
+# "src/execution.cc",
+# "src/execution.h",
+# "src/extensions/externalize-string-extension.cc",
+# "src/extensions/externalize-string-extension.h",
+# "src/extensions/free-buffer-extension.cc",
+# "src/extensions/free-buffer-extension.h",
+# "src/extensions/gc-extension.cc",
+# "src/extensions/gc-extension.h",
+# "src/extensions/statistics-extension.cc",
+# "src/extensions/statistics-extension.h",
+# "src/extensions/trigger-failure-extension.cc",
+# "src/extensions/trigger-failure-extension.h",
+# "src/factory.cc",
+# "src/factory.h",
+# "src/fast-dtoa.cc",
+# "src/fast-dtoa.h",
+# "src/feedback-slots.h",
+# "src/fixed-dtoa.cc",
+# "src/fixed-dtoa.h",
+# "src/flag-definitions.h",
+# "src/flags.cc",
+# "src/flags.h",
+# "src/frames-inl.h",
+# "src/frames.cc",
+# "src/frames.h",
+# "src/full-codegen.cc",
+# "src/full-codegen.h",
+# "src/func-name-inferrer.cc",
+# "src/func-name-inferrer.h",
+# "src/gdb-jit.cc",
+# "src/gdb-jit.h",
+# "src/global-handles.cc",
+# "src/global-handles.h",
+# "src/globals.h",
+# "src/handles-inl.h",
+# "src/handles.cc",
+# "src/handles.h",
+# "src/hashmap.h",
+# "src/heap-inl.h",
+# "src/heap-profiler.cc",
+# "src/heap-profiler.h",
+# "src/heap-snapshot-generator-inl.h",
+# "src/heap-snapshot-generator.cc",
+# "src/heap-snapshot-generator.h",
+# "src/heap.cc",
+# "src/heap.h",
+# "src/hydrogen-alias-analysis.h",
+# "src/hydrogen-bce.cc",
+# "src/hydrogen-bce.h",
+# "src/hydrogen-bch.cc",
+# "src/hydrogen-bch.h",
+# "src/hydrogen-canonicalize.cc",
+# "src/hydrogen-canonicalize.h",
+# "src/hydrogen-check-elimination.cc",
+# "src/hydrogen-check-elimination.h",
+# "src/hydrogen-dce.cc",
+# "src/hydrogen-dce.h",
+# "src/hydrogen-dehoist.cc",
+# "src/hydrogen-dehoist.h",
+# "src/hydrogen-environment-liveness.cc",
+# "src/hydrogen-environment-liveness.h",
+# "src/hydrogen-escape-analysis.cc",
+# "src/hydrogen-escape-analysis.h",
+# "src/hydrogen-flow-engine.h",
+# "src/hydrogen-instructions.cc",
+# "src/hydrogen-instructions.h",
+# "src/hydrogen.cc",
+# "src/hydrogen.h",
+# "src/hydrogen-gvn.cc",
+# "src/hydrogen-gvn.h",
+# "src/hydrogen-infer-representation.cc",
+# "src/hydrogen-infer-representation.h",
+# "src/hydrogen-infer-types.cc",
+# "src/hydrogen-infer-types.h",
+# "src/hydrogen-load-elimination.cc",
+# "src/hydrogen-load-elimination.h",
+# "src/hydrogen-mark-deoptimize.cc",
+# "src/hydrogen-mark-deoptimize.h",
+# "src/hydrogen-mark-unreachable.cc",
+# "src/hydrogen-mark-unreachable.h",
+# "src/hydrogen-osr.cc",
+# "src/hydrogen-osr.h",
+# "src/hydrogen-range-analysis.cc",
+# "src/hydrogen-range-analysis.h",
+# "src/hydrogen-redundant-phi.cc",
+# "src/hydrogen-redundant-phi.h",
+# "src/hydrogen-removable-simulates.cc",
+# "src/hydrogen-removable-simulates.h",
+# "src/hydrogen-representation-changes.cc",
+# "src/hydrogen-representation-changes.h",
+# "src/hydrogen-sce.cc",
+# "src/hydrogen-sce.h",
+# "src/hydrogen-store-elimination.cc",
+# "src/hydrogen-store-elimination.h",
+# "src/hydrogen-uint32-analysis.cc",
+# "src/hydrogen-uint32-analysis.h",
+# "src/i18n.cc",
+# "src/i18n.h",
+# "src/icu_util.cc",
+# "src/icu_util.h",
+# "src/ic-inl.h",
+# "src/ic.cc",
+# "src/ic.h",
+# "src/incremental-marking.cc",
+# "src/incremental-marking.h",
+# "src/interface.cc",
+# "src/interface.h",
+# "src/interpreter-irregexp.cc",
+# "src/interpreter-irregexp.h",
+# "src/isolate.cc",
+# "src/isolate.h",
+# "src/json-parser.h",
+# "src/json-stringifier.h",
+# "src/jsregexp-inl.h",
+# "src/jsregexp.cc",
+# "src/jsregexp.h",
+# "src/lazy-instance.h",
+# # TODO(jochen): move libplatform/ files to their own target.
+# "src/libplatform/default-platform.cc",
+# "src/libplatform/default-platform.h",
+# "src/libplatform/task-queue.cc",
+# "src/libplatform/task-queue.h",
+# "src/libplatform/worker-thread.cc",
+# "src/libplatform/worker-thread.h",
+# "src/list-inl.h",
+# "src/list.h",
+# "src/lithium-allocator-inl.h",
+# "src/lithium-allocator.cc",
+# "src/lithium-allocator.h",
+# "src/lithium-codegen.cc",
+# "src/lithium-codegen.h",
+# "src/lithium.cc",
+# "src/lithium.h",
+# "src/liveedit.cc",
+# "src/liveedit.h",
+# "src/log-inl.h",
+# "src/log-utils.cc",
+# "src/log-utils.h",
+# "src/log.cc",
+# "src/log.h",
+# "src/macro-assembler.h",
+# "src/mark-compact.cc",
+# "src/mark-compact.h",
+# "src/messages.cc",
+# "src/messages.h",
+# "src/msan.h",
+# "src/natives.h",
+# "src/objects-debug.cc",
+# "src/objects-inl.h",
+# "src/objects-printer.cc",
+# "src/objects-visiting.cc",
+# "src/objects-visiting.h",
+# "src/objects.cc",
+# "src/objects.h",
+# "src/once.cc",
+# "src/once.h",
+# "src/optimizing-compiler-thread.h",
+# "src/optimizing-compiler-thread.cc",
+# "src/parser.cc",
+# "src/parser.h",
+# "src/platform/elapsed-timer.h",
+# "src/platform/time.cc",
+# "src/platform/time.h",
+# "src/platform.h",
+# "src/platform/condition-variable.cc",
+# "src/platform/condition-variable.h",
+# "src/platform/mutex.cc",
+# "src/platform/mutex.h",
+# "src/platform/semaphore.cc",
+# "src/platform/semaphore.h",
+# "src/platform/socket.cc",
+# "src/platform/socket.h",
+# "src/preparse-data-format.h",
+# "src/preparse-data.cc",
+# "src/preparse-data.h",
+# "src/preparser.cc",
+# "src/preparser.h",
+# "src/prettyprinter.cc",
+# "src/prettyprinter.h",
+# "src/profile-generator-inl.h",
+# "src/profile-generator.cc",
+# "src/profile-generator.h",
+# "src/property-details.h",
+# "src/property.cc",
+# "src/property.h",
+# "src/regexp-macro-assembler-irregexp-inl.h",
+# "src/regexp-macro-assembler-irregexp.cc",
+# "src/regexp-macro-assembler-irregexp.h",
+# "src/regexp-macro-assembler-tracer.cc",
+# "src/regexp-macro-assembler-tracer.h",
+# "src/regexp-macro-assembler.cc",
+# "src/regexp-macro-assembler.h",
+# "src/regexp-stack.cc",
+# "src/regexp-stack.h",
+# "src/rewriter.cc",
+# "src/rewriter.h",
+# "src/runtime-profiler.cc",
+# "src/runtime-profiler.h",
+# "src/runtime.cc",
+# "src/runtime.h",
+# "src/safepoint-table.cc",
+# "src/safepoint-table.h",
+# "src/sampler.cc",
+# "src/sampler.h",
+# "src/scanner-character-streams.cc",
+# "src/scanner-character-streams.h",
+# "src/scanner.cc",
+# "src/scanner.h",
+# "src/scopeinfo.cc",
+# "src/scopeinfo.h",
+# "src/scopes.cc",
+# "src/scopes.h",
+# "src/serialize.cc",
+# "src/serialize.h",
+# "src/small-pointer-list.h",
+# "src/smart-pointers.h",
+# "src/snapshot-common.cc",
+# "src/snapshot.h",
+# "src/spaces-inl.h",
+# "src/spaces.cc",
+# "src/spaces.h",
+# "src/store-buffer-inl.h",
+# "src/store-buffer.cc",
+# "src/store-buffer.h",
+# "src/string-search.cc",
+# "src/string-search.h",
+# "src/string-stream.cc",
+# "src/string-stream.h",
+# "src/strtod.cc",
+# "src/strtod.h",
+# "src/stub-cache.cc",
+# "src/stub-cache.h",
+# "src/sweeper-thread.h",
+# "src/sweeper-thread.cc",
+# "src/token.cc",
+# "src/token.h",
+# "src/transitions-inl.h",
+# "src/transitions.cc",
+# "src/transitions.h",
+# "src/type-info.cc",
+# "src/type-info.h",
+# "src/types-inl.h",
+# "src/types.cc",
+# "src/types.h",
+# "src/typing.cc",
+# "src/typing.h",
+# "src/unbound-queue-inl.h",
+# "src/unbound-queue.h",
+# "src/unicode-inl.h",
+# "src/unicode.cc",
+# "src/unicode.h",
+# "src/unique.h",
+# "src/uri.h",
+# "src/utils-inl.h",
+# "src/utils.cc",
+# "src/utils.h",
+# "src/utils/random-number-generator.cc",
+# "src/utils/random-number-generator.h",
+# "src/v8-counters.cc",
+# "src/v8-counters.h",
+# "src/v8.cc",
+# "src/v8.h",
+# "src/v8checks.h",
+# "src/v8conversions.cc",
+# "src/v8conversions.h",
+# "src/v8globals.h",
+# "src/v8memory.h",
+# "src/v8threads.cc",
+# "src/v8threads.h",
+# "src/v8utils.cc",
+# "src/v8utils.h",
+# "src/variables.cc",
+# "src/variables.h",
+# "src/version.cc",
+# "src/version.h",
+# "src/vm-state-inl.h",
+# "src/vm-state.h",
+# "src/zone-inl.h",
+# "src/zone.cc",
+# "src/zone.h",
+# ]
+#
+# if (cpu_arch == "x86") {
+# # TODO(brettw) the GYP file has
+# # or v8_target_arch=="mac" or OS=="mac"
+# # which I don't understand.
+# sources += [
+# "src/ia32/assembler-ia32-inl.h",
+# "src/ia32/assembler-ia32.cc",
+# "src/ia32/assembler-ia32.h",
+# "src/ia32/builtins-ia32.cc",
+# "src/ia32/code-stubs-ia32.cc",
+# "src/ia32/code-stubs-ia32.h",
+# "src/ia32/codegen-ia32.cc",
+# "src/ia32/codegen-ia32.h",
+# "src/ia32/cpu-ia32.cc",
+# "src/ia32/debug-ia32.cc",
+# "src/ia32/deoptimizer-ia32.cc",
+# "src/ia32/disasm-ia32.cc",
+# "src/ia32/frames-ia32.cc",
+# "src/ia32/frames-ia32.h",
+# "src/ia32/full-codegen-ia32.cc",
+# "src/ia32/ic-ia32.cc",
+# "src/ia32/lithium-codegen-ia32.cc",
+# "src/ia32/lithium-codegen-ia32.h",
+# "src/ia32/lithium-gap-resolver-ia32.cc",
+# "src/ia32/lithium-gap-resolver-ia32.h",
+# "src/ia32/lithium-ia32.cc",
+# "src/ia32/lithium-ia32.h",
+# "src/ia32/macro-assembler-ia32.cc",
+# "src/ia32/macro-assembler-ia32.h",
+# "src/ia32/regexp-macro-assembler-ia32.cc",
+# "src/ia32/regexp-macro-assembler-ia32.h",
+# "src/ia32/stub-cache-ia32.cc",
+# ]
+# } else if (cpu_arch == "x64") {
+# sources += [
+# "src/x64/assembler-x64-inl.h",
+# "src/x64/assembler-x64.cc",
+# "src/x64/assembler-x64.h",
+# "src/x64/builtins-x64.cc",
+# "src/x64/code-stubs-x64.cc",
+# "src/x64/code-stubs-x64.h",
+# "src/x64/codegen-x64.cc",
+# "src/x64/codegen-x64.h",
+# "src/x64/cpu-x64.cc",
+# "src/x64/debug-x64.cc",
+# "src/x64/deoptimizer-x64.cc",
+# "src/x64/disasm-x64.cc",
+# "src/x64/frames-x64.cc",
+# "src/x64/frames-x64.h",
+# "src/x64/full-codegen-x64.cc",
+# "src/x64/ic-x64.cc",
+# "src/x64/lithium-codegen-x64.cc",
+# "src/x64/lithium-codegen-x64.h",
+# "src/x64/lithium-gap-resolver-x64.cc",
+# "src/x64/lithium-gap-resolver-x64.h",
+# "src/x64/lithium-x64.cc",
+# "src/x64/lithium-x64.h",
+# "src/x64/macro-assembler-x64.cc",
+# "src/x64/macro-assembler-x64.h",
+# "src/x64/regexp-macro-assembler-x64.cc",
+# "src/x64/regexp-macro-assembler-x64.h",
+# "src/x64/stub-cache-x64.cc",
+# ]
+# } else if (cpu_arch == "arm") {
+# sources += [
+# "src/arm/assembler-arm-inl.h",
+# "src/arm/assembler-arm.cc",
+# "src/arm/assembler-arm.h",
+# "src/arm/builtins-arm.cc",
+# "src/arm/code-stubs-arm.cc",
+# "src/arm/code-stubs-arm.h",
+# "src/arm/codegen-arm.cc",
+# "src/arm/codegen-arm.h",
+# "src/arm/constants-arm.h",
+# "src/arm/constants-arm.cc",
+# "src/arm/cpu-arm.cc",
+# "src/arm/debug-arm.cc",
+# "src/arm/deoptimizer-arm.cc",
+# "src/arm/disasm-arm.cc",
+# "src/arm/frames-arm.cc",
+# "src/arm/frames-arm.h",
+# "src/arm/full-codegen-arm.cc",
+# "src/arm/ic-arm.cc",
+# "src/arm/lithium-arm.cc",
+# "src/arm/lithium-arm.h",
+# "src/arm/lithium-codegen-arm.cc",
+# "src/arm/lithium-codegen-arm.h",
+# "src/arm/lithium-gap-resolver-arm.cc",
+# "src/arm/lithium-gap-resolver-arm.h",
+# "src/arm/macro-assembler-arm.cc",
+# "src/arm/macro-assembler-arm.h",
+# "src/arm/regexp-macro-assembler-arm.cc",
+# "src/arm/regexp-macro-assembler-arm.h",
+# "src/arm/simulator-arm.cc",
+# "src/arm/stub-cache-arm.cc",
+# ]
+# } else if (cpu_arch == "arm64") {
+# sources += [
+# "src/arm64/assembler-arm64.cc",
+# "src/arm64/assembler-arm64.h",
+# "src/arm64/assembler-arm64-inl.h",
+# "src/arm64/builtins-arm64.cc",
+# "src/arm64/codegen-arm64.cc",
+# "src/arm64/codegen-arm64.h",
+# "src/arm64/code-stubs-arm64.cc",
+# "src/arm64/code-stubs-arm64.h",
+# "src/arm64/constants-arm64.h",
+# "src/arm64/cpu-arm64.cc",
+# "src/arm64/cpu-arm64.h",
+# "src/arm64/debug-arm64.cc",
+# "src/arm64/decoder-arm64.cc",
+# "src/arm64/decoder-arm64.h",
+# "src/arm64/decoder-arm64-inl.h",
+# "src/arm64/deoptimizer-arm64.cc",
+# "src/arm64/disasm-arm64.cc",
+# "src/arm64/disasm-arm64.h",
+# "src/arm64/frames-arm64.cc",
+# "src/arm64/frames-arm64.h",
+# "src/arm64/full-codegen-arm64.cc",
+# "src/arm64/ic-arm64.cc",
+# "src/arm64/instructions-arm64.cc",
+# "src/arm64/instructions-arm64.h",
+# "src/arm64/instrument-arm64.cc",
+# "src/arm64/instrument-arm64.h",
+# "src/arm64/lithium-arm64.cc",
+# "src/arm64/lithium-arm64.h",
+# "src/arm64/lithium-codegen-arm64.cc",
+# "src/arm64/lithium-codegen-arm64.h",
+# "src/arm64/lithium-gap-resolver-arm64.cc",
+# "src/arm64/lithium-gap-resolver-arm64.h",
+# "src/arm64/macro-assembler-arm64.cc",
+# "src/arm64/macro-assembler-arm64.h",
+# "src/arm64/macro-assembler-arm64-inl.h",
+# "src/arm64/regexp-macro-assembler-arm64.cc",
+# "src/arm64/regexp-macro-assembler-arm64.h",
+# "src/arm64/simulator-arm64.cc",
+# "src/arm64/simulator-arm64.h",
+# "src/arm64/stub-cache-arm64.cc",
+# "src/arm64/utils-arm64.cc",
+# "src/arm64/utils-arm64.h",
+# ]
+# } else if (cpu_arch == "mipsel") {
+# sources += [
+# "src/mips/assembler-mips.cc",
+# "src/mips/assembler-mips.h",
+# "src/mips/assembler-mips-inl.h",
+# "src/mips/builtins-mips.cc",
+# "src/mips/codegen-mips.cc",
+# "src/mips/codegen-mips.h",
+# "src/mips/code-stubs-mips.cc",
+# "src/mips/code-stubs-mips.h",
+# "src/mips/constants-mips.cc",
+# "src/mips/constants-mips.h",
+# "src/mips/cpu-mips.cc",
+# "src/mips/debug-mips.cc",
+# "src/mips/deoptimizer-mips.cc",
+# "src/mips/disasm-mips.cc",
+# "src/mips/frames-mips.cc",
+# "src/mips/frames-mips.h",
+# "src/mips/full-codegen-mips.cc",
+# "src/mips/ic-mips.cc",
+# "src/mips/lithium-codegen-mips.cc",
+# "src/mips/lithium-codegen-mips.h",
+# "src/mips/lithium-gap-resolver-mips.cc",
+# "src/mips/lithium-gap-resolver-mips.h",
+# "src/mips/lithium-mips.cc",
+# "src/mips/lithium-mips.h",
+# "src/mips/macro-assembler-mips.cc",
+# "src/mips/macro-assembler-mips.h",
+# "src/mips/regexp-macro-assembler-mips.cc",
+# "src/mips/regexp-macro-assembler-mips.h",
+# "src/mips/simulator-mips.cc",
+# "src/mips/stub-cache-mips.cc",
+# ]
+# }
+#
+# configs += [ ":internal_config" ]
+#
+# defines = []
+# deps = []
+#
+# if (is_posix) {
+# sources += [
+# "src/platform-posix.cc"
+# ]
+# }
+#
+# if (is_linux) {
+# sources += [
+# "src/platform-linux.cc"
+# ]
+#
+# # TODO(brettw)
+# # 'conditions': [
+# # ['v8_compress_startup_data=="bz2"', {
+# # 'libraries': [
+# # '-lbz2',
+# # ]
+# # }],
+# # ],
+#
+# libs = [ "rt" ]
+# } else if (is_android) {
+# # TODO(brettW) OS=="android" condition from tools/gyp/v8.gyp
+# } else if (is_mac) {
+# sources += [ "src/platform-macoscc" ]
+# } else if (is_win) {
+# sources += [
+# "src/platform-win32.cc",
+# "src/win32-math.cc",
+# "src/win32-math.h",
+# ]
+#
+# defines += [ "_CRT_RAND_S" ] # for rand_s()
+#
+# libs = [ "winmm.lib", "ws2_32.lib" ]
+# }
+#
+#
+# if (v8_enable_i18n_support) {
+# deps += [ "//third_party/icu" ]
+# if (is_win) {
+# deps += [ "//third_party/icu:icudata" ]
+# }
+# } else {
+# sources -= [
+# "src/i18n.cc",
+# "src/i18n.h",
+# ]
+# }
+#
+# # TODO(brettw) other conditions from v8.gyp
+# # TODO(brettw) icu_use_data_file_flag
+#}
+#
+#action("js2c") {
+# script = "tools/js2c.py"
+#
+# # The script depends on this other script, this rule causes a rebuild if it
+# # changes.
+# source_prereqs = [ "tools/jsmin.py" ]
+#
+# sources = [
+# "src/runtime.js",
+# "src/v8natives.js",
+# "src/array.js",
+# "src/string.js",
+# "src/uri.js",
+# "src/math.js",
+# "src/messages.js",
+# "src/apinatives.js",
+# "src/debug-debugger.js",
+# "src/mirror-debugger.js",
+# "src/liveedit-debugger.js",
+# "src/date.js",
+# "src/json.js",
+# "src/regexp.js",
+# "src/arraybuffer.js",
+# "src/typedarray.js",
+# "src/object-observe.js",
+# "src/macros.py",
+# ]
+#
+# outputs = [
+# "$target_gen_dir/libraries.cc"
+# ]
+#
+# if (v8_enable_i18n_support) {
+# sources += [ "src/i18n.js" ]
+# }
+#
+# args =
+# rebase_path(outputs, root_build_dir) +
+# [ "CORE", v8_compress_startup_data ] +
+# rebase_path(sources, root_build_dir)
+#}
+#
+#source_set("v8_nosnapshot") {
+# visibility = ":*" # Only targets in this file can depend on this.
+#
+# sources = [
+#
+# ]
+#
+# configs += [ ":internal_config" ]
+#}
+#
# TODO finish this, currently has linker errors.
#executable("mksnapshot") {
# visibility = ":*" # Only targets in this file can depend on this.
@@ -702,5 +704,5 @@ source_set("v8_nosnapshot") {
# libs = [ "bz2" ]
# }
#}
-
-} # end Android commenting-out.
+#
+#} # end Android commenting-out.
diff --git a/tools/gn/target.cc b/tools/gn/target.cc
index 0763a8c7d9..259fcf50bb 100644
--- a/tools/gn/target.cc
+++ b/tools/gn/target.cc
@@ -147,6 +147,7 @@ void Target::OnResolved() {
// pulled from G to A in case G has configs directly on it).
PullDependentTargetInfo(&unique_configs);
}
+ PullForwardedDependentConfigs();
}
bool Target::IsLinkable() const {
@@ -181,6 +182,12 @@ void Target::PullDependentTargetInfo(std::set<const Config*>* unique_configs) {
all_libs_.append(dep->all_libs());
}
}
+}
+
+void Target::PullForwardedDependentConfigs() {
+ // Groups implicitly forward all of their deps' dependent configs.
+ if (output_type() == GROUP)
+ forward_dependent_configs_ = deps_;
// Forward direct dependent configs if requested.
for (size_t dep = 0; dep < forward_dependent_configs_.size(); dep++) {
diff --git a/tools/gn/target.h b/tools/gn/target.h
index 6b8eb99e18..6f3965b492 100644
--- a/tools/gn/target.h
+++ b/tools/gn/target.h
@@ -150,6 +150,9 @@ class Target : public Item {
// dependencies have been resolved.
void PullDependentTargetInfo(std::set<const Config*>* unique_configs);
+ // Pulls dependent configs that need forwarding.
+ void PullForwardedDependentConfigs();
+
OutputType output_type_;
std::string output_name_;
std::string output_extension_;
diff --git a/tools/gn/target_unittest.cc b/tools/gn/target_unittest.cc
index ef213b5286..860dcaf1b0 100644
--- a/tools/gn/target_unittest.cc
+++ b/tools/gn/target_unittest.cc
@@ -169,3 +169,33 @@ TEST_F(TargetTest, DependentConfigs) {
ASSERT_EQ(1u, a_fwd.all_dependent_configs().size());
EXPECT_EQ(&all, a_fwd.all_dependent_configs()[0].ptr);
}
+
+// Tests that forward_dependent_configs_from works for groups, forwarding the
+// group's deps' dependent configs.
+TEST_F(TargetTest, ForwardDependentConfigsFromGroups) {
+ Target a(&settings_, Label(SourceDir("//foo/"), "a"));
+ a.set_output_type(Target::EXECUTABLE);
+ Target b(&settings_, Label(SourceDir("//foo/"), "b"));
+ b.set_output_type(Target::GROUP);
+ Target c(&settings_, Label(SourceDir("//foo/"), "c"));
+ c.set_output_type(Target::STATIC_LIBRARY);
+ a.deps().push_back(LabelTargetPair(&b));
+ b.deps().push_back(LabelTargetPair(&c));
+
+ // Direct dependent config on C.
+ Config direct(&settings_, Label(SourceDir("//foo/"), "direct"));
+ c.direct_dependent_configs().push_back(LabelConfigPair(&direct));
+
+ // A forwards the dependent configs from B.
+ a.forward_dependent_configs().push_back(LabelTargetPair(&b));
+
+ c.OnResolved();
+ b.OnResolved();
+ a.OnResolved();
+
+ // The config should now be on A, and in A's direct dependent configs.
+ ASSERT_EQ(1u, a.configs().size());
+ ASSERT_EQ(&direct, a.configs()[0].ptr);
+ ASSERT_EQ(1u, a.direct_dependent_configs().size());
+ ASSERT_EQ(&direct, a.direct_dependent_configs()[0].ptr);
+}
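
The new PullForwardedDependentConfigs() step gives groups exactly the behavior the test above exercises: a group implicitly forwards the direct dependent configs of its own deps, so a target that depends on the group (and forwards from it) picks up configs that originate two levels down. Below is a minimal toy model of that propagation in Python; it is illustrative only (not the real GN code) and the names mirror the unit test:

    # Toy model: a GROUP implicitly forwards all of its deps' direct dependent
    # configs to whatever depends on it.
    class Target(object):
        def __init__(self, name, output_type):
            self.name = name
            self.output_type = output_type
            self.deps = []
            self.forward_dependent_configs = []
            self.direct_dependent_configs = []
            self.configs = []

        def on_resolved(self):
            # Roughly PullDependentTargetInfo(): take configs from direct deps.
            for dep in self.deps:
                self.configs += dep.direct_dependent_configs
            # Roughly PullForwardedDependentConfigs(): groups forward everything.
            if self.output_type == 'group':
                self.forward_dependent_configs = list(self.deps)
            for fwd in self.forward_dependent_configs:
                self.direct_dependent_configs += fwd.direct_dependent_configs

    c = Target('c', 'static_library')
    c.direct_dependent_configs = ['direct']
    b = Target('b', 'group')
    b.deps = [c]
    a = Target('a', 'executable')
    a.deps = [b]
    a.forward_dependent_configs = [b]

    for t in (c, b, a):  # deps resolve before their dependents
        t.on_resolved()

    assert a.configs == ['direct']
    assert a.direct_dependent_configs == ['direct']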
diff --git a/tools/gritsettings/resource_ids b/tools/gritsettings/resource_ids
index 7a1c7f7369..4ecaabdde4 100644
--- a/tools/gritsettings/resource_ids
+++ b/tools/gritsettings/resource_ids
@@ -165,10 +165,15 @@
"chrome/common/extensions_api_resources.grd": {
"includes": [26500],
},
- # TODO(jamescook): Add extensions/extensions_resources.grd here.
+ "extensions/extensions_resources.grd": {
+ "includes": [26750],
+ },
"extensions/extensions_strings.grd": {
"messages": [27000],
},
+ "apps/shell/app_shell_resources.grd": {
+ "includes": [27400],
+ },
"chrome/browser/resources/memory_internals_resources.grd": {
"includes": [27500],
},
diff --git a/tools/ipc_fuzzer/mutate/mutate.gyp b/tools/ipc_fuzzer/mutate/mutate.gyp
index 7b13599d22..b2122c8604 100644
--- a/tools/ipc_fuzzer/mutate/mutate.gyp
+++ b/tools/ipc_fuzzer/mutate/mutate.gyp
@@ -27,6 +27,15 @@
'rand_util.h',
'rand_util.cc',
],
+ 'conditions': [
+ ['asan==1', {
+ 'cflags!': [
+ # Compiling mutate.cc with ASan takes too long, see
+ # http://crbug.com/360158.
+ '-fsanitize=address',
+ ],
+ }],
+ ],
'include_dirs': [
'../../..',
],
@@ -54,6 +63,15 @@
'rand_util.h',
'rand_util.cc',
],
+ 'conditions': [
+ ['asan==1', {
+ 'cflags!': [
+ # Compiling generate.cc with ASan takes too long, see
+ # http://crbug.com/360158.
+ '-fsanitize=address',
+ ],
+ }],
+ ],
'include_dirs': [
'../../..',
],
diff --git a/tools/json_schema_compiler/BUILD.gn b/tools/json_schema_compiler/BUILD.gn
new file mode 100644
index 0000000000..2d7c183807
--- /dev/null
+++ b/tools/json_schema_compiler/BUILD.gn
@@ -0,0 +1,13 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Utility sources against which generated API modules should be linked.
+source_set("generated_api_util") {
+ sources = [
+ "util.cc",
+ "util.h"
+ ]
+ deps = [ "//base" ]
+}
+
diff --git a/tools/json_schema_compiler/api_gen_util.target.darwin-arm64.mk b/tools/json_schema_compiler/api_gen_util.target.darwin-arm64.mk
new file mode 100644
index 0000000000..ff2c92dd71
--- /dev/null
+++ b/tools/json_schema_compiler/api_gen_util.target.darwin-arm64.mk
@@ -0,0 +1,247 @@
+# This file is generated by gyp; do not edit.
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := tools_json_schema_compiler_api_gen_util_gyp
+LOCAL_MODULE_SUFFIX := .a
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE_TARGET_ARCH := $(TARGET_$(GYP_VAR_PREFIX)ARCH)
+gyp_intermediate_dir := $(call local-intermediates-dir,,$(GYP_VAR_PREFIX))
+gyp_shared_intermediate_dir := $(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))
+
+# Make sure our deps are built first.
+GYP_TARGET_DEPENDENCIES :=
+
+GYP_GENERATED_OUTPUTS :=
+
+# Make sure our deps and generated files are built first.
+LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) $(GYP_GENERATED_OUTPUTS)
+
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_GENERATED_SOURCES :=
+
+GYP_COPIED_SOURCE_ORIGIN_DIRS :=
+
+LOCAL_SRC_FILES := \
+ tools/json_schema_compiler/util.cc
+
+
+# Flags passed to both C and C++ files.
+MY_CFLAGS_Debug := \
+ --param=ssp-buffer-size=4 \
+ -Werror \
+ -fno-exceptions \
+ -fno-strict-aliasing \
+ -Wall \
+ -Wno-unused-parameter \
+ -Wno-missing-field-initializers \
+ -fvisibility=hidden \
+ -pipe \
+ -fPIC \
+ -Wno-unused-local-typedefs \
+ -ffunction-sections \
+ -funwind-tables \
+ -g \
+ -fno-short-enums \
+ -finline-limit=64 \
+ -Wa,--noexecstack \
+ -U_FORTIFY_SOURCE \
+ -Wno-extra \
+ -Wno-ignored-qualifiers \
+ -Wno-type-limits \
+ -Wno-unused-but-set-variable \
+ -Os \
+ -g \
+ -fomit-frame-pointer \
+ -fdata-sections \
+ -ffunction-sections \
+ -funwind-tables
+
+MY_DEFS_Debug := \
+ '-DV8_DEPRECATION_WARNINGS' \
+ '-DBLINK_SCALE_FILTERS_AT_RECORD_TIME' \
+ '-D_FILE_OFFSET_BITS=64' \
+ '-DNO_TCMALLOC' \
+ '-DDISABLE_NACL' \
+ '-DCHROMIUM_BUILD' \
+ '-DUSE_LIBJPEG_TURBO=1' \
+ '-DENABLE_WEBRTC=1' \
+ '-DUSE_PROPRIETARY_CODECS' \
+ '-DENABLE_CONFIGURATION_POLICY' \
+ '-DDISCARDABLE_MEMORY_ALWAYS_SUPPORTED_NATIVELY' \
+ '-DSYSTEM_NATIVELY_SIGNALS_MEMORY_PRESSURE' \
+ '-DENABLE_EGLIMAGE=1' \
+ '-DCLD_VERSION=1' \
+ '-DENABLE_PRINTING=1' \
+ '-DENABLE_MANAGED_USERS=1' \
+ '-DUSE_OPENSSL=1' \
+ '-DUSE_OPENSSL_CERTS=1' \
+ '-D__STDC_CONSTANT_MACROS' \
+ '-D__STDC_FORMAT_MACROS' \
+ '-DANDROID' \
+ '-D__GNU_SOURCE=1' \
+ '-DUSE_STLPORT=1' \
+ '-D_STLP_USE_PTR_SPECIALIZATIONS=1' \
+ '-DCHROME_BUILD_ID=""' \
+ '-DDYNAMIC_ANNOTATIONS_ENABLED=1' \
+ '-DWTF_USE_DYNAMIC_ANNOTATIONS=1' \
+ '-D_DEBUG'
+
+
+# Include paths placed before CFLAGS/CPPFLAGS
+LOCAL_C_INCLUDES_Debug := \
+ $(gyp_shared_intermediate_dir)/shim_headers/ashmem/target \
+ $(LOCAL_PATH) \
+ $(PWD)/frameworks/wilhelm/include \
+ $(PWD)/bionic \
+ $(PWD)/external/stlport/stlport
+
+
+# Flags passed to only C++ (and not C) files.
+LOCAL_CPPFLAGS_Debug := \
+ -fno-rtti \
+ -fno-threadsafe-statics \
+ -fvisibility-inlines-hidden \
+ -Wsign-compare \
+ -Wno-non-virtual-dtor \
+ -Wno-sign-promo
+
+
+# Flags passed to both C and C++ files.
+MY_CFLAGS_Release := \
+ --param=ssp-buffer-size=4 \
+ -Werror \
+ -fno-exceptions \
+ -fno-strict-aliasing \
+ -Wall \
+ -Wno-unused-parameter \
+ -Wno-missing-field-initializers \
+ -fvisibility=hidden \
+ -pipe \
+ -fPIC \
+ -Wno-unused-local-typedefs \
+ -ffunction-sections \
+ -funwind-tables \
+ -g \
+ -fno-short-enums \
+ -finline-limit=64 \
+ -Wa,--noexecstack \
+ -U_FORTIFY_SOURCE \
+ -Wno-extra \
+ -Wno-ignored-qualifiers \
+ -Wno-type-limits \
+ -Wno-unused-but-set-variable \
+ -Os \
+ -fno-ident \
+ -fdata-sections \
+ -ffunction-sections \
+ -fomit-frame-pointer \
+ -funwind-tables
+
+MY_DEFS_Release := \
+ '-DV8_DEPRECATION_WARNINGS' \
+ '-DBLINK_SCALE_FILTERS_AT_RECORD_TIME' \
+ '-D_FILE_OFFSET_BITS=64' \
+ '-DNO_TCMALLOC' \
+ '-DDISABLE_NACL' \
+ '-DCHROMIUM_BUILD' \
+ '-DUSE_LIBJPEG_TURBO=1' \
+ '-DENABLE_WEBRTC=1' \
+ '-DUSE_PROPRIETARY_CODECS' \
+ '-DENABLE_CONFIGURATION_POLICY' \
+ '-DDISCARDABLE_MEMORY_ALWAYS_SUPPORTED_NATIVELY' \
+ '-DSYSTEM_NATIVELY_SIGNALS_MEMORY_PRESSURE' \
+ '-DENABLE_EGLIMAGE=1' \
+ '-DCLD_VERSION=1' \
+ '-DENABLE_PRINTING=1' \
+ '-DENABLE_MANAGED_USERS=1' \
+ '-DUSE_OPENSSL=1' \
+ '-DUSE_OPENSSL_CERTS=1' \
+ '-D__STDC_CONSTANT_MACROS' \
+ '-D__STDC_FORMAT_MACROS' \
+ '-DANDROID' \
+ '-D__GNU_SOURCE=1' \
+ '-DUSE_STLPORT=1' \
+ '-D_STLP_USE_PTR_SPECIALIZATIONS=1' \
+ '-DCHROME_BUILD_ID=""' \
+ '-DNDEBUG' \
+ '-DNVALGRIND' \
+ '-DDYNAMIC_ANNOTATIONS_ENABLED=0' \
+ '-D_FORTIFY_SOURCE=2'
+
+
+# Include paths placed before CFLAGS/CPPFLAGS
+LOCAL_C_INCLUDES_Release := \
+ $(gyp_shared_intermediate_dir)/shim_headers/ashmem/target \
+ $(LOCAL_PATH) \
+ $(PWD)/frameworks/wilhelm/include \
+ $(PWD)/bionic \
+ $(PWD)/external/stlport/stlport
+
+
+# Flags passed to only C++ (and not C) files.
+LOCAL_CPPFLAGS_Release := \
+ -fno-rtti \
+ -fno-threadsafe-statics \
+ -fvisibility-inlines-hidden \
+ -Wsign-compare \
+ -Wno-non-virtual-dtor \
+ -Wno-sign-promo
+
+
+LOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) $(MY_DEFS_$(GYP_CONFIGURATION))
+LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) $(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))
+LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))
+LOCAL_ASFLAGS := $(LOCAL_CFLAGS)
+### Rules for final target.
+
+LOCAL_LDFLAGS_Debug := \
+ -Wl,-z,now \
+ -Wl,-z,relro \
+ -Wl,--fatal-warnings \
+ -Wl,-z,noexecstack \
+ -fPIC \
+ -nostdlib \
+ -Wl,--no-undefined \
+ -Wl,--exclude-libs=ALL \
+ -Wl,--warn-shared-textrel \
+ -Wl,-O1 \
+ -Wl,--as-needed
+
+
+LOCAL_LDFLAGS_Release := \
+ -Wl,-z,now \
+ -Wl,-z,relro \
+ -Wl,--fatal-warnings \
+ -Wl,-z,noexecstack \
+ -fPIC \
+ -nostdlib \
+ -Wl,--no-undefined \
+ -Wl,--exclude-libs=ALL \
+ -Wl,-O1 \
+ -Wl,--as-needed \
+ -Wl,--gc-sections \
+ -Wl,--warn-shared-textrel
+
+
+LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION))
+
+LOCAL_STATIC_LIBRARIES :=
+
+# Enable grouping to fix circular references
+LOCAL_GROUP_STATIC_LIBRARIES := true
+
+LOCAL_SHARED_LIBRARIES := \
+ libstlport \
+ libdl
+
+# Add target alias to "gyp_all_modules" target.
+.PHONY: gyp_all_modules
+gyp_all_modules: tools_json_schema_compiler_api_gen_util_gyp
+
+# Alias gyp target name.
+.PHONY: api_gen_util
+api_gen_util: tools_json_schema_compiler_api_gen_util_gyp
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/tools/json_schema_compiler/api_gen_util.target.linux-arm64.mk b/tools/json_schema_compiler/api_gen_util.target.linux-arm64.mk
new file mode 100644
index 0000000000..ff2c92dd71
--- /dev/null
+++ b/tools/json_schema_compiler/api_gen_util.target.linux-arm64.mk
@@ -0,0 +1,247 @@
+# This file is generated by gyp; do not edit.
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_CLASS := STATIC_LIBRARIES
+LOCAL_MODULE := tools_json_schema_compiler_api_gen_util_gyp
+LOCAL_MODULE_SUFFIX := .a
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE_TARGET_ARCH := $(TARGET_$(GYP_VAR_PREFIX)ARCH)
+gyp_intermediate_dir := $(call local-intermediates-dir,,$(GYP_VAR_PREFIX))
+gyp_shared_intermediate_dir := $(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))
+
+# Make sure our deps are built first.
+GYP_TARGET_DEPENDENCIES :=
+
+GYP_GENERATED_OUTPUTS :=
+
+# Make sure our deps and generated files are built first.
+LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) $(GYP_GENERATED_OUTPUTS)
+
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_GENERATED_SOURCES :=
+
+GYP_COPIED_SOURCE_ORIGIN_DIRS :=
+
+LOCAL_SRC_FILES := \
+ tools/json_schema_compiler/util.cc
+
+
+# Flags passed to both C and C++ files.
+MY_CFLAGS_Debug := \
+ --param=ssp-buffer-size=4 \
+ -Werror \
+ -fno-exceptions \
+ -fno-strict-aliasing \
+ -Wall \
+ -Wno-unused-parameter \
+ -Wno-missing-field-initializers \
+ -fvisibility=hidden \
+ -pipe \
+ -fPIC \
+ -Wno-unused-local-typedefs \
+ -ffunction-sections \
+ -funwind-tables \
+ -g \
+ -fno-short-enums \
+ -finline-limit=64 \
+ -Wa,--noexecstack \
+ -U_FORTIFY_SOURCE \
+ -Wno-extra \
+ -Wno-ignored-qualifiers \
+ -Wno-type-limits \
+ -Wno-unused-but-set-variable \
+ -Os \
+ -g \
+ -fomit-frame-pointer \
+ -fdata-sections \
+ -ffunction-sections \
+ -funwind-tables
+
+MY_DEFS_Debug := \
+ '-DV8_DEPRECATION_WARNINGS' \
+ '-DBLINK_SCALE_FILTERS_AT_RECORD_TIME' \
+ '-D_FILE_OFFSET_BITS=64' \
+ '-DNO_TCMALLOC' \
+ '-DDISABLE_NACL' \
+ '-DCHROMIUM_BUILD' \
+ '-DUSE_LIBJPEG_TURBO=1' \
+ '-DENABLE_WEBRTC=1' \
+ '-DUSE_PROPRIETARY_CODECS' \
+ '-DENABLE_CONFIGURATION_POLICY' \
+ '-DDISCARDABLE_MEMORY_ALWAYS_SUPPORTED_NATIVELY' \
+ '-DSYSTEM_NATIVELY_SIGNALS_MEMORY_PRESSURE' \
+ '-DENABLE_EGLIMAGE=1' \
+ '-DCLD_VERSION=1' \
+ '-DENABLE_PRINTING=1' \
+ '-DENABLE_MANAGED_USERS=1' \
+ '-DUSE_OPENSSL=1' \
+ '-DUSE_OPENSSL_CERTS=1' \
+ '-D__STDC_CONSTANT_MACROS' \
+ '-D__STDC_FORMAT_MACROS' \
+ '-DANDROID' \
+ '-D__GNU_SOURCE=1' \
+ '-DUSE_STLPORT=1' \
+ '-D_STLP_USE_PTR_SPECIALIZATIONS=1' \
+ '-DCHROME_BUILD_ID=""' \
+ '-DDYNAMIC_ANNOTATIONS_ENABLED=1' \
+ '-DWTF_USE_DYNAMIC_ANNOTATIONS=1' \
+ '-D_DEBUG'
+
+
+# Include paths placed before CFLAGS/CPPFLAGS
+LOCAL_C_INCLUDES_Debug := \
+ $(gyp_shared_intermediate_dir)/shim_headers/ashmem/target \
+ $(LOCAL_PATH) \
+ $(PWD)/frameworks/wilhelm/include \
+ $(PWD)/bionic \
+ $(PWD)/external/stlport/stlport
+
+
+# Flags passed to only C++ (and not C) files.
+LOCAL_CPPFLAGS_Debug := \
+ -fno-rtti \
+ -fno-threadsafe-statics \
+ -fvisibility-inlines-hidden \
+ -Wsign-compare \
+ -Wno-non-virtual-dtor \
+ -Wno-sign-promo
+
+
+# Flags passed to both C and C++ files.
+MY_CFLAGS_Release := \
+ --param=ssp-buffer-size=4 \
+ -Werror \
+ -fno-exceptions \
+ -fno-strict-aliasing \
+ -Wall \
+ -Wno-unused-parameter \
+ -Wno-missing-field-initializers \
+ -fvisibility=hidden \
+ -pipe \
+ -fPIC \
+ -Wno-unused-local-typedefs \
+ -ffunction-sections \
+ -funwind-tables \
+ -g \
+ -fno-short-enums \
+ -finline-limit=64 \
+ -Wa,--noexecstack \
+ -U_FORTIFY_SOURCE \
+ -Wno-extra \
+ -Wno-ignored-qualifiers \
+ -Wno-type-limits \
+ -Wno-unused-but-set-variable \
+ -Os \
+ -fno-ident \
+ -fdata-sections \
+ -ffunction-sections \
+ -fomit-frame-pointer \
+ -funwind-tables
+
+MY_DEFS_Release := \
+ '-DV8_DEPRECATION_WARNINGS' \
+ '-DBLINK_SCALE_FILTERS_AT_RECORD_TIME' \
+ '-D_FILE_OFFSET_BITS=64' \
+ '-DNO_TCMALLOC' \
+ '-DDISABLE_NACL' \
+ '-DCHROMIUM_BUILD' \
+ '-DUSE_LIBJPEG_TURBO=1' \
+ '-DENABLE_WEBRTC=1' \
+ '-DUSE_PROPRIETARY_CODECS' \
+ '-DENABLE_CONFIGURATION_POLICY' \
+ '-DDISCARDABLE_MEMORY_ALWAYS_SUPPORTED_NATIVELY' \
+ '-DSYSTEM_NATIVELY_SIGNALS_MEMORY_PRESSURE' \
+ '-DENABLE_EGLIMAGE=1' \
+ '-DCLD_VERSION=1' \
+ '-DENABLE_PRINTING=1' \
+ '-DENABLE_MANAGED_USERS=1' \
+ '-DUSE_OPENSSL=1' \
+ '-DUSE_OPENSSL_CERTS=1' \
+ '-D__STDC_CONSTANT_MACROS' \
+ '-D__STDC_FORMAT_MACROS' \
+ '-DANDROID' \
+ '-D__GNU_SOURCE=1' \
+ '-DUSE_STLPORT=1' \
+ '-D_STLP_USE_PTR_SPECIALIZATIONS=1' \
+ '-DCHROME_BUILD_ID=""' \
+ '-DNDEBUG' \
+ '-DNVALGRIND' \
+ '-DDYNAMIC_ANNOTATIONS_ENABLED=0' \
+ '-D_FORTIFY_SOURCE=2'
+
+
+# Include paths placed before CFLAGS/CPPFLAGS
+LOCAL_C_INCLUDES_Release := \
+ $(gyp_shared_intermediate_dir)/shim_headers/ashmem/target \
+ $(LOCAL_PATH) \
+ $(PWD)/frameworks/wilhelm/include \
+ $(PWD)/bionic \
+ $(PWD)/external/stlport/stlport
+
+
+# Flags passed to only C++ (and not C) files.
+LOCAL_CPPFLAGS_Release := \
+ -fno-rtti \
+ -fno-threadsafe-statics \
+ -fvisibility-inlines-hidden \
+ -Wsign-compare \
+ -Wno-non-virtual-dtor \
+ -Wno-sign-promo
+
+
+LOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) $(MY_DEFS_$(GYP_CONFIGURATION))
+LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) $(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))
+LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))
+LOCAL_ASFLAGS := $(LOCAL_CFLAGS)
+### Rules for final target.
+
+LOCAL_LDFLAGS_Debug := \
+ -Wl,-z,now \
+ -Wl,-z,relro \
+ -Wl,--fatal-warnings \
+ -Wl,-z,noexecstack \
+ -fPIC \
+ -nostdlib \
+ -Wl,--no-undefined \
+ -Wl,--exclude-libs=ALL \
+ -Wl,--warn-shared-textrel \
+ -Wl,-O1 \
+ -Wl,--as-needed
+
+
+LOCAL_LDFLAGS_Release := \
+ -Wl,-z,now \
+ -Wl,-z,relro \
+ -Wl,--fatal-warnings \
+ -Wl,-z,noexecstack \
+ -fPIC \
+ -nostdlib \
+ -Wl,--no-undefined \
+ -Wl,--exclude-libs=ALL \
+ -Wl,-O1 \
+ -Wl,--as-needed \
+ -Wl,--gc-sections \
+ -Wl,--warn-shared-textrel
+
+
+LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION))
+
+LOCAL_STATIC_LIBRARIES :=
+
+# Enable grouping to fix circular references
+LOCAL_GROUP_STATIC_LIBRARIES := true
+
+LOCAL_SHARED_LIBRARIES := \
+ libstlport \
+ libdl
+
+# Add target alias to "gyp_all_modules" target.
+.PHONY: gyp_all_modules
+gyp_all_modules: tools_json_schema_compiler_api_gen_util_gyp
+
+# Alias gyp target name.
+.PHONY: api_gen_util
+api_gen_util: tools_json_schema_compiler_api_gen_util_gyp
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/tools/json_schema_compiler/cc_generator.py b/tools/json_schema_compiler/cc_generator.py
index be3126a879..fbd47ffe32 100644
--- a/tools/json_schema_compiler/cc_generator.py
+++ b/tools/json_schema_compiler/cc_generator.py
@@ -346,24 +346,23 @@ class _Generator(object):
)
for prop in type_.properties.values():
+ prop_var = 'this->%s' % prop.unix_name
if prop.optional:
# Optional enum values are generated with a NONE enum value.
underlying_type = self._type_helper.FollowRef(prop.type_)
if underlying_type.property_type == PropertyType.ENUM:
c.Sblock('if (%s != %s) {' %
- (prop.unix_name,
+ (prop_var,
self._type_helper.GetEnumNoneValue(prop.type_)))
else:
- c.Sblock('if (%s.get()) {' % prop.unix_name)
+ c.Sblock('if (%s.get()) {' % prop_var)
# ANY is a base::Value which is abstract and cannot be a direct member, so
# it will always be a pointer.
is_ptr = prop.optional or prop.type_.property_type == PropertyType.ANY
c.Append('value->SetWithoutPathExpansion("%s", %s);' % (
prop.name,
- self._CreateValueFromType(prop.type_,
- 'this->%s' % prop.unix_name,
- is_ptr=is_ptr)))
+ self._CreateValueFromType(prop.type_, prop_var, is_ptr=is_ptr)))
if prop.optional:
c.Eblock('}')
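
The refactor above funnels every reference to the property through a single prop_var string, so both the optional-property guard and the value expression name the member as this-><unix_name>. A small hedged sketch of the strings this pattern produces, where the property and enum names are made up and CreateEnumValue() merely stands in for the real _CreateValueFromType() expansion:

    # Hypothetical optional enum property "sort_order"; names are illustrative.
    prop_unix_name = 'sort_order'
    prop_var = 'this->%s' % prop_unix_name

    guard = 'if (%s != %s) {' % (prop_var, 'SORT_ORDER_NONE')
    setter = 'value->SetWithoutPathExpansion("%s", %s);' % (
        'sortOrder', 'CreateEnumValue(%s)' % prop_var)

    print guard   # if (this->sort_order != SORT_ORDER_NONE) {
    print setter  # value->SetWithoutPathExpansion("sortOrder", CreateEnumValue(this->sort_order));
    print '}'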
diff --git a/tools/json_schema_compiler/test/tabs.json b/tools/json_schema_compiler/test/tabs.json
index d7271e6d80..7dca080e99 100644
--- a/tools/json_schema_compiler/test/tabs.json
+++ b/tools/json_schema_compiler/test/tabs.json
@@ -491,7 +491,7 @@
{
"name": "captureVisibleTab",
"type": "function",
- "description": "Captures the visible area of the currently active tab in the specified window. You must have <a href='manifest.html#permissions'>host permission</a> for the URL displayed by the tab.",
+ "description": "Captures the visible area of the currently active tab in the specified window. You must have <a href='manifest.html#permissions'>&lt;all_urls&gt;</a> permission to use this method.",
"parameters": [
{
"type": "integer",
diff --git a/tools/linux/dump-static-initializers.py b/tools/linux/dump-static-initializers.py
index 865559b932..0e83456d78 100755
--- a/tools/linux/dump-static-initializers.py
+++ b/tools/linux/dump-static-initializers.py
@@ -214,7 +214,10 @@ def main():
ref_output.append(ref)
if opts.diffable:
- print '\n'.join('# ' + qualified_filename + ' ' + r for r in ref_output)
+ if ref_output:
+ print '\n'.join('# ' + qualified_filename + ' ' + r for r in ref_output)
+ else:
+ print '# %s: (empty initializer list)' % qualified_filename
else:
print '%s (initializer offset 0x%x size 0x%x)' % (qualified_filename,
addr, size)
diff --git a/tools/memory_inspector/PRESUBMIT.py b/tools/memory_inspector/PRESUBMIT.py
index 7bfe22a527..5d0e21e3d3 100644
--- a/tools/memory_inspector/PRESUBMIT.py
+++ b/tools/memory_inspector/PRESUBMIT.py
@@ -13,7 +13,11 @@ def CommonChecks(input_api, output_api):
output = []
blacklist = [r'classification_rules.*']
output.extend(input_api.canned_checks.RunPylint(
- input_api, output_api, black_list=blacklist))
+ input_api, output_api, black_list=blacklist,
+ extra_paths_list=[
+ input_api.os_path.join(input_api.PresubmitLocalPath(), '..', '..',
+ 'build', 'android')
+ ]))
output.extend(input_api.canned_checks.RunUnitTests(
input_api,
output_api,
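
The extra path handed to pylint resolves to <CHROME_SRC>/build/android, presumably so the pylib imports touched later in this change (see android_backend.py below) lint cleanly without their former '# pylint: disable=F0401' suppressions. A small sketch of the path computation, with a hypothetical checkout location standing in for PresubmitLocalPath():

    import os

    presubmit_local_path = '/src/chromium/tools/memory_inspector'  # hypothetical
    extra_path = os.path.join(presubmit_local_path, '..', '..', 'build', 'android')
    print os.path.normpath(extra_path)  # /src/chromium/build/android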
diff --git a/tools/memory_inspector/memory_inspector/backends/android/android_backend.py b/tools/memory_inspector/memory_inspector/backends/android/android_backend.py
index b75e723941..4bae745d17 100644
--- a/tools/memory_inspector/memory_inspector/backends/android/android_backend.py
+++ b/tools/memory_inspector/memory_inspector/backends/android/android_backend.py
@@ -25,8 +25,9 @@ from memory_inspector.core import symbol
# The memory_inspector/__init__ module will add the <CHROME_SRC>/build/android
# deps to the PYTHONPATH for pylib.
-from pylib import android_commands # pylint: disable=F0401
-from pylib.symbols import elf_symbolizer # pylint: disable=F0401
+from pylib import android_commands
+from pylib.device import device_utils
+from pylib.symbols import elf_symbolizer
_MEMDUMP_PREBUILT_PATH = os.path.join(constants.PROJECT_SRC,
@@ -62,7 +63,7 @@ class AndroidBackend(backends.Backend):
device = self._devices.get(device_id)
if not device:
device = AndroidDevice(
- self, android_commands.AndroidCommands(device_id))
+ self, device_utils.DeviceUtils(device_id))
self._devices[device_id] = device
yield device
@@ -157,13 +158,13 @@ class AndroidDevice(backends.Device):
_SETTINGS_KEYS = {
'native_symbol_paths': 'Comma-separated list of native libs search path'}
- def __init__(self, backend, adb):
+ def __init__(self, backend, underlying_device):
super(AndroidDevice, self).__init__(
backend=backend,
settings=backends.Settings(AndroidDevice._SETTINGS_KEYS))
- self.adb = adb
- self._id = adb.GetDevice()
- self._name = adb.GetProductModel()
+ self.underlying_device = underlying_device
+ self._id = underlying_device.old_interface.GetDevice()
+ self._name = underlying_device.old_interface.GetProductModel()
self._sys_stats = None
self._last_device_stats = None
self._sys_stats_last_update = None
@@ -172,7 +173,7 @@ class AndroidDevice(backends.Device):
def Initialize(self):
"""Starts adb root and deploys the prebuilt binaries on initialization."""
- self.adb.EnableAdbRoot()
+ self.underlying_device.old_interface.EnableAdbRoot()
# Download (from GCS) and deploy prebuilt helper binaries on the device.
self._DeployPrebuiltOnDeviceIfNeeded(_MEMDUMP_PREBUILT_PATH,
@@ -183,16 +184,18 @@ class AndroidDevice(backends.Device):
def IsNativeTracingEnabled(self):
"""Checks for the libc.debug.malloc system property."""
- return bool(self.adb.system_properties[_DLMALLOC_DEBUG_SYSPROP])
+ return bool(self.underlying_device.old_interface.system_properties[
+ _DLMALLOC_DEBUG_SYSPROP])
def EnableNativeTracing(self, enabled):
"""Enables libc.debug.malloc and restarts the shell."""
assert(self._initialized)
prop_value = '1' if enabled else ''
- self.adb.system_properties[_DLMALLOC_DEBUG_SYSPROP] = prop_value
+ self.underlying_device.old_interface.system_properties[
+ _DLMALLOC_DEBUG_SYSPROP] = prop_value
assert(self.IsNativeTracingEnabled())
# The libc.debug property takes effect only after restarting the Zygote.
- self.adb.RestartShell()
+ self.underlying_device.old_interface.RestartShell()
def ListProcesses(self):
"""Returns a sequence of |AndroidProcess|."""
@@ -248,7 +251,9 @@ class AndroidDevice(backends.Device):
datetime.datetime.now() - self._sys_stats_last_update <= max_ttl):
return self._sys_stats
- dump_out = '\n'.join(self.adb.RunShellCommand(_PSEXT_PATH_ON_DEVICE))
+ dump_out = '\n'.join(
+ self.underlying_device.old_interface.RunShellCommand(
+ _PSEXT_PATH_ON_DEVICE))
stats = json.loads(dump_out)
assert(all([x in stats for x in ['cpu', 'processes', 'time', 'mem']])), (
'ps_ext returned a malformed JSON dictionary.')
@@ -275,11 +280,13 @@ class AndroidDevice(backends.Device):
prebuilts_fetcher.GetIfChanged(local_path)
with open(local_path, 'rb') as f:
local_hash = hashlib.md5(f.read()).hexdigest()
- device_md5_out = self.adb.RunShellCommand('md5 "%s"' % path_on_device)
+ device_md5_out = self.underlying_device.old_interface.RunShellCommand(
+ 'md5 "%s"' % path_on_device)
if local_hash in device_md5_out:
return
- self.adb.Adb().Push(local_path, path_on_device)
- self.adb.RunShellCommand('chmod 755 "%s"' % path_on_device)
+ self.underlying_device.old_interface.Adb().Push(local_path, path_on_device)
+ self.underlying_device.old_interface.RunShellCommand(
+ 'chmod 755 "%s"' % path_on_device)
@property
def name(self):
@@ -302,7 +309,7 @@ class AndroidProcess(backends.Process):
def DumpMemoryMaps(self):
"""Grabs and parses memory maps through memdump."""
cmd = '%s %d' % (_MEMDUMP_PATH_ON_DEVICE, self.pid)
- dump_out = self.device.adb.RunShellCommand(cmd)
+ dump_out = self.device.underlying_device.old_interface.RunShellCommand(cmd)
return memdump_parser.Parse(dump_out)
def DumpNativeHeap(self):
@@ -310,12 +317,14 @@ class AndroidProcess(backends.Process):
# TODO(primiano): grab also mmap bt (depends on pending framework change).
dump_file_path = _DUMPHEAP_OUT_FILE_PATH % self.pid
cmd = 'am dumpheap -n %d %s' % (self.pid, dump_file_path)
- self.device.adb.RunShellCommand(cmd)
+ self.device.underlying_device.old_interface.RunShellCommand(cmd)
# TODO(primiano): Some pre-KK versions of Android might need a sleep here
# as, IIRC, 'am dumpheap' did not wait for the dump to be completed before
# returning. Double check this and either add a sleep or remove this TODO.
- dump_out = self.device.adb.GetFileContents(dump_file_path)
- self.device.adb.RunShellCommand('rm %s' % dump_file_path)
+ dump_out = self.device.underlying_device.old_interface.GetFileContents(
+ dump_file_path)
+ self.device.underlying_device.old_interface.RunShellCommand(
+ 'rm %s' % dump_file_path)
return dumpheap_native_parser.Parse(dump_out)
def GetStats(self):
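
Every call site above now goes through underlying_device.old_interface: the backend is being migrated from android_commands.AndroidCommands to the newer pylib.device.device_utils.DeviceUtils wrapper, and old_interface keeps the legacy methods reachable until they are ported. A minimal sketch of the pattern, with a hypothetical device serial:

    from pylib.device import device_utils

    device = device_utils.DeviceUtils('0123456789abcdef')  # hypothetical serial
    # Legacy AndroidCommands methods stay available during the transition.
    device.old_interface.EnableAdbRoot()
    lines = device.old_interface.RunShellCommand('ls /data/local/tmp')
    print '\n'.join(lines)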
diff --git a/tools/metrics/actions/actions.xml b/tools/metrics/actions/actions.xml
index 8c21bfdb7a..cacb05faee 100644
--- a/tools/metrics/actions/actions.xml
+++ b/tools/metrics/actions/actions.xml
@@ -8690,6 +8690,11 @@ should be able to be added at any place in this file.
<description>Please enter the description of this user action.</description>
</action>
+<action name="Options_SetTimeDialog_Show">
+ <owner>michaelpg@chromium.org</owner>
+ <description>Set system time dialog was launched.</description>
+</action>
+
<action name="Options_ShowAutoFillSettings">
<owner>Please list the metric's owners. Add more owner tags as needed.</owner>
<description>Please enter the description of this user action.</description>
diff --git a/tools/metrics/histograms/histograms.xml b/tools/metrics/histograms/histograms.xml
index 56a99848ec..54b82f08a0 100644
--- a/tools/metrics/histograms/histograms.xml
+++ b/tools/metrics/histograms/histograms.xml
@@ -8571,6 +8571,26 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<summary>Records events related to devices page.</summary>
</histogram>
+<histogram name="LocalDiscovery.FirewallAccessTime" units="milliseconds">
+ <owner>noamsml@chromium.org</owner>
+ <owner>vitalybuka@chromium.org</owner>
+ <summary>
+ Windows-only histogram that reports the time a request spends accessing
+ firewall rules. It's logged once per browser process lifetime, when local
+ discovery is used for the first time.
+ </summary>
+</histogram>
+
+<histogram name="LocalDiscovery.FirewallState" enum="BooleanEnabled">
+ <owner>noamsml@chromium.org</owner>
+ <owner>vitalybuka@chromium.org</owner>
+ <summary>
+ Windows-only histogram that reports whether a firewall rule is set so that
+ we can bind inbound sockets. It's logged once per browser process lifetime,
+ when local discovery is used for the first time.
+ </summary>
+</histogram>
+
<histogram name="LocalDiscovery.PrivetNotificationsEvent"
enum="PrivetNotificationsEvent">
<owner>noamsml@chromium.org</owner>
@@ -11651,6 +11671,15 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</summary>
</histogram>
+<histogram name="Net.ErrorPageCounts" enum="NetErrorPageEvents">
+ <owner>rdsmith@chromium.org</owner>
+ <owner>ellyjones@chromium.org</owner>
+ <summary>
+ Counts of various events that can occur on the network error page. See the
+ histogram for details.
+ </summary>
+</histogram>
+
<histogram name="Net.FileError_Flush">
<owner>Please list the metric's owners. Add more owner tags as needed.</owner>
<summary>
@@ -13334,6 +13363,16 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</summary>
</histogram>
+<histogram name="Net.SpdyHpackEncodedCharacterFrequency" units="ASCII codes">
+ <owner>jgraettinger@chromium.org</owner>
+ <summary>
+ Frequencies of characters observed in request and response headers.
+ Temporarily being collected to inform the construction of an optimized
+ Huffman code for the HTTP/2 specification. Buckets are ASCII codes offset by
+ 1.
+ </summary>
+</histogram>
+
<histogram name="Net.SpdyIPPoolDomainMatch" enum="SpdyIPPoolDomainMatch"
units="count">
<owner>Please list the metric's owners. Add more owner tags as needed.</owner>
@@ -13989,6 +14028,28 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<summary>The time spent in closesocket call in UDPSocketWin::Close.</summary>
</histogram>
+<histogram name="Net.WebSocket.DeflateMode"
+ enum="WebSocketNewPerMessageDeflateContextTakeoverMode">
+ <owner>yhirano@chromium.org</owner>
+ <owner>ricea@chromium.org</owner>
+ <owner>tyoshino@chromium.org</owner>
+ <summary>
+ Count the number of WebSockets that accepted permessage-deflate extension
+ for each context take over mode. Used by the new Chromium-based WebSocket
+ implementation.
+ </summary>
+</histogram>
+
+<histogram name="Net.WebSocket.Duration" units="milliseconds">
+ <owner>yhirano@chromium.org</owner>
+ <owner>ricea@chromium.org</owner>
+ <owner>tyoshino@chromium.org</owner>
+ <summary>
+ The time from when a WebSocket is successfully opened until it is closed.
+ Used to study how WebSockets are used.
+ </summary>
+</histogram>
+
<histogram name="Net.WebSocket.HandshakeResult"
enum="WebSocketNewHandshakeResult">
<owner>yhirano@chromium.org</owner>
@@ -15789,6 +15850,9 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<histogram name="NewTabPage.BookmarkActionAndroid"
enum="NewTabPageBookmarkActionAndroid">
+ <obsolete>
+ Deprecated on M33 with the change to native NTP.
+ </obsolete>
<owner>Please list the metric's owners. Add more owner tags as needed.</owner>
<summary>
Actions taken by users on partner bookmarks (editing / renaming) on the NTP
@@ -15818,6 +15882,9 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</histogram>
<histogram name="NewTabPage.MobilePromo" enum="NewTabPageMobilePromo">
+ <obsolete>
+ Deprecated on M33 with the change to native NTP.
+ </obsolete>
<owner>Please list the metric's owners. Add more owner tags as needed.</owner>
<summary>
Android: Tallies counts for how the user interacted with the NTP promo page.
@@ -23121,6 +23188,47 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</summary>
</histogram>
+<histogram name="SharedWorker.RendererSurviveForWorkerTime"
+ units="milliseconds">
+ <owner>horo@chromium.org</owner>
+ <summary>
+ The survival time of the RenderProcessHostImpl for an in-renderer SharedWorker,
+ measured from when FastShutdownIfPossible() is called.
+ </summary>
+</histogram>
+
+<histogram name="SharedWorker.TimeToDeleted" units="milliseconds">
+ <owner>horo@chromium.org</owner>
+ <summary>
+ The lifetime of a SharedWorkerHost. This roughly corresponds to the lifetime
+ of SharedWorker.
+ </summary>
+</histogram>
+
+<histogram name="SharedWorker.TimeToScriptLoaded" units="milliseconds">
+ <owner>horo@chromium.org</owner>
+ <summary>
+ The time from the creation of the SharedWorkerHost until WorkerScriptLoaded
+ is called.
+ </summary>
+</histogram>
+
+<histogram name="SharedWorker.TimeToScriptLoadFailed" units="milliseconds">
+ <owner>horo@chromium.org</owner>
+ <summary>
+ The time from the creation of the SharedWorkerHost until
+ WorkerScriptLoadFailed is called.
+ </summary>
+</histogram>
+
+<histogram name="ShortcutsProvider.QueryIndexTime" units="milliseconds">
+ <owner>davidben@chromium.org</owner>
+ <summary>
+ The time it takes for the ShortcutsProvider to perform a query after the
+ user has typed N characters.
+ </summary>
+</histogram>
+
<histogram name="Signin.OneClickConfirmation" enum="SigninFlowConfirmations">
<owner>Please list the metric's owners. Add more owner tags as needed.</owner>
<summary>
@@ -29752,10 +29860,23 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<histogram name="WebCore.WebSocket.PerMessageDeflateContextTakeOverMode"
enum="WebSocketPerMessageDeflateContextTakeOverMode">
- <owner>Please list the metric's owners. Add more owner tags as needed.</owner>
+ <owner>yhirano@chromium.org</owner>
+ <owner>ricea@chromium.org</owner>
+ <owner>tyoshino@chromium.org</owner>
<summary>
Count the number of WebSockets that accepted permessage-deflate extension
- for each context take over mode.
+ for each context take over mode. Used by the old Blink-based WebSocket
+ implementation.
+ </summary>
+</histogram>
+
+<histogram name="WebCore.WebSocket.SendType" enum="WebSocketSendType">
+ <owner>yhirano@chromium.org</owner>
+ <owner>ricea@chromium.org</owner>
+ <owner>tyoshino@chromium.org</owner>
+ <summary>
+ Count the number of send() method calls on WebSockets for each argument
+ type.
</summary>
</histogram>
@@ -30133,6 +30254,13 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</summary>
</histogram>
+<histogram name="WebRTC.DesktopCaptureCounters" enum="DesktopCaptureCounters">
+ <owner>jiayl@chromium.org</owner>
+ <summary>
+ Counters on creation of DesktopCaptureDevice and the first capture call.
+ </summary>
+</histogram>
+
<histogram name="WebRTC.NumDataChannelsPerPeerConnection">
<owner>perkj@chromium.org</owner>
<summary>
@@ -30171,6 +30299,11 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</summary>
</histogram>
+<histogram name="WebRTC.ScreenCaptureTime" units="milliseconds">
+ <owner>jiayl@chromium.org</owner>
+ <summary>Time for capturing one frame in screen capturing.</summary>
+</histogram>
+
<histogram name="WebRTC.SentAudioTrackDuration" units="milliseconds">
<owner>perkj@chromium.org</owner>
<summary>
@@ -30223,6 +30356,11 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</summary>
</histogram>
+<histogram name="WebRTC.WindowCaptureTime" units="milliseconds">
+ <owner>jiayl@chromium.org</owner>
+ <summary>Time for capturing one frame in window capturing.</summary>
+</histogram>
+
<histogram name="Webstore.ExtensionInstallResult" enum="BooleanSuccess">
<owner>jackhou@chromium.org</owner>
<summary>
@@ -31608,6 +31746,15 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<int value="2" label="Proxy available and enabled"/>
</enum>
+<enum name="DesktopCaptureCounters" type="int">
+ <int value="0" label="Screen capturer created."/>
+ <int value="1" label="Window capturer created."/>
+ <int value="2" label="First screen capture call succeeded."/>
+ <int value="3" label="First screen capture call failed."/>
+ <int value="4" label="First window capture call succeeded."/>
+ <int value="5" label="First window capture call failed."/>
+</enum>
+
<enum name="DevicePermissionActions" type="int">
<int value="0" label="AllowHttps"/>
<int value="1" label="AllowHttp"/>
@@ -32628,6 +32775,10 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<int value="260" label="Media keys default to function keys"/>
<int value="261" label="Enable WPAD quick check"/>
<int value="262" label="Wallpaper image"/>
+ <int value="263"
+ label="Enable the use of relay servers by the remote access host"/>
+ <int value="264"
+ label="Restrict the UDP port range used by the remote access host"/>
</enum>
<enum name="EnterprisePolicyInvalidations" type="int">
@@ -33600,6 +33751,20 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<int value="733" label="BLUETOOTHLOWENERGY_WRITEDESCRIPTORVALUE"/>
<int value="734" label="BOOKMARKMANAGERPRIVATE_CREATEWITHMETAINFO"/>
<int value="735" label="BOOKMARKMANAGERPRIVATE_UPDATEMETAINFO"/>
+ <int value="736" label="BLUETOOTHSOCKET_CREATE"/>
+ <int value="737" label="BLUETOOTHSOCKET_UPDATE"/>
+ <int value="738" label="BLUETOOTHSOCKET_SETPAUSED"/>
+ <int value="739" label="BLUETOOTHSOCKET_LISTENUSINGRFCOMM"/>
+ <int value="740" label="BLUETOOTHSOCKET_LISTENUSINGINSECURERFCOMM"/>
+ <int value="741" label="BLUETOOTHSOCKET_LISTENUSINGL2CAP"/>
+ <int value="742" label="BLUETOOTHSOCKET_CONNECT"/>
+ <int value="743" label="BLUETOOTHSOCKET_DISCONNECT"/>
+ <int value="744" label="BLUETOOTHSOCKET_CLOSE"/>
+ <int value="745" label="BLUETOOTHSOCKET_SEND"/>
+ <int value="746" label="BLUETOOTHSOCKET_GETINFO"/>
+ <int value="747" label="BLUETOOTHSOCKET_GETSOCKETS"/>
+ <int value="748" label="WEBSTOREPRIVATE_SIGNINFUNCTION"/>
+ <int value="749" label="SHELL_CREATEWINDOW"/>
</enum>
<enum name="ExtensionInstallCause" type="int">
@@ -37373,6 +37538,18 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<int value="806" label="DNS_SORT_ERROR"/>
</enum>
+<enum name="NetErrorPageEvents" type="int">
+ <int value="0" label="Error Page Shown"/>
+ <int value="1" label="Reload Button Shown"/>
+ <int value="2" label="Reload Button Clicked"/>
+ <int value="3" label="Reload Button Click Load Error"/>
+ <int value="4" label="Load Stale Button Shown"/>
+ <int value="5" label="Load Stale Button Clicked"/>
+ <int value="6" label="Load Stale Button Click Load Error"/>
+ <int value="7" label="More Button Clicked"/>
+ <int value="8" label="Browser Initiated Reload"/>
+</enum>
+
<enum name="NetPreconnectUtilization" type="int">
<int value="0" label="non-speculative, never connected"/>
<int value="1" label="non-speculative, never used"/>
@@ -39265,6 +39442,8 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<int value="6" label="CLOUD_DUPLICATE_SELECTED"/>
<int value="7" label="REGISTER_PROMO_SHOWN"/>
<int value="8" label="REGISTER_PROMO_SELECTED"/>
+ <int value="9" label="ACCOUNT_CHANGED"/>
+ <int value="10" label="ADD_ACCOUNT_SELECTED"/>
</enum>
<enum name="PrintPreviewUserActionType" type="int">
@@ -40459,11 +40638,12 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</enum>
<enum name="SocketStreamConnectionType" type="int">
- <int value="0" label="none"/>
- <int value="1" label="all connections"/>
- <int value="2" label="tunnel connections"/>
- <int value="3" label="socks connections"/>
- <int value="4" label="ssl connections"/>
+ <int value="0" label="None"/>
+ <int value="1" label="All"/>
+ <int value="2" label="Tunnel"/>
+ <int value="3" label="SOCKS"/>
+ <int value="4" label="SSL"/>
+ <int value="5" label="Secure proxy"/>
</enum>
<enum name="SocketStreamProtocolType" type="int">
@@ -40560,6 +40740,13 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<int value="29" label="GoAway Frame Corrupt"/>
<int value="30" label="RstStream Frame Corrupt"/>
<int value="31" label="Unexpected Frame (Expected Continuation)"/>
+<!-- More SpdyRstStreamStatus -->
+
+ <int value="32" label="Timeout waiting for settings acknowledgement"/>
+ <int value="33"
+ label="Connection established in response to CONNECT request was
+ abnormally closed"/>
+ <int value="34" label="Peer exhibiting suspect behavior."/>
</enum>
<enum name="SpdySessionGet" type="int">
@@ -42142,9 +42329,21 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<int value="2" label="FAILED">Failed</int>
</enum>
+<enum name="WebSocketNewPerMessageDeflateContextTakeoverMode" type="int">
+ <int value="0" label="Do not take over"/>
+ <int value="1" label="Take over"/>
+</enum>
+
<enum name="WebSocketPerMessageDeflateContextTakeOverMode" type="int">
- <int value="0" label="DoNotTakeOverContext"/>
- <int value="1" label="TakeOverContext"/>
+ <int value="0" label="Do not take over"/>
+ <int value="1" label="Take over"/>
+</enum>
+
+<enum name="WebSocketSendType" type="int">
+ <int value="0" label="String"/>
+ <int value="1" label="ArrayBuffer"/>
+ <int value="2" label="ArrayBufferView"/>
+ <int value="3" label="Blob"/>
</enum>
<enum name="WiFiApMode" type="int">
@@ -44885,6 +45084,7 @@ Therefore, the affected-histogram name has to have at least one dot in it.
<suffix name="4" label="N = 4"/>
<suffix name="5" label="N = 5"/>
<affected-histogram name="Omnibox.QueryTime"/>
+ <affected-histogram name="ShortcutsProvider.QueryIndexTime"/>
</histogram_suffixes>
<histogram_suffixes name="QuicPortSelection" separator="">
diff --git a/tools/msan/blacklist.txt b/tools/msan/blacklist.txt
index 08607845a9..12e95d92fe 100644
--- a/tools/msan/blacklist.txt
+++ b/tools/msan/blacklist.txt
@@ -16,3 +16,7 @@ fun:unpack_RGB888
# http://crbug.com/363487
fun:*WebCore*RenderLayerCompositor*updateIfNeeded*
+
+# Fixed in clang r207227.
+# http://code.google.com/p/memory-sanitizer/issues/detail?id=53
+fun:getc_unlocked
diff --git a/tools/perf/OWNERS b/tools/perf/OWNERS
index 5e3172a822..3ae24034fd 100644
--- a/tools/perf/OWNERS
+++ b/tools/perf/OWNERS
@@ -6,4 +6,5 @@ hartmanng@chromium.org
marja@chromium.org
nduca@chromium.org
qyearsley@chromium.org
+skyostil@chromium.org
tonyg@chromium.org
diff --git a/tools/perf/benchmarks/OWNERS b/tools/perf/benchmarks/OWNERS
new file mode 100644
index 0000000000..b4a4e10f63
--- /dev/null
+++ b/tools/perf/benchmarks/OWNERS
@@ -0,0 +1,3 @@
+per-file *chrome_proxy*=bengr@chromium.org
+per-file *chrome_proxy*=bolian@chromium.org
+per-file *chrome_proxy*=marq@chromium.org
diff --git a/tools/perf/benchmarks/browsermark.py b/tools/perf/benchmarks/browsermark.py
index ac33464cfc..85953afece 100644
--- a/tools/perf/benchmarks/browsermark.py
+++ b/tools/perf/benchmarks/browsermark.py
@@ -24,7 +24,6 @@ from telemetry import test
from telemetry.page import page_measurement
from telemetry.page import page_set
-
class _BrowsermarkMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
@@ -49,12 +48,9 @@ class Browsermark(test.Test):
"""Browsermark suite tests CSS, DOM, resize, page load, WebGL and JS."""
test = _BrowsermarkMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict({
- 'archive_data_file': '../page_sets/data/browsermark.json',
- 'make_javascript_deterministic': False,
- 'pages': [
- { 'url':
- 'http://browsermark.rightware.com/tests/'}
- ]
- }, os.path.abspath(__file__))
-
+ ps = page_set.PageSet(
+ file_path=os.path.abspath(__file__),
+ archive_data_file='../page_sets/data/browsermark.json',
+ make_javascript_deterministic=False)
+ ps.AddPageWithDefaultRunNavigate('http://browsermark.rightware.com/tests/')
+ return ps
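
All of the benchmark conversions in this change follow the same pattern: the dictionary passed to page_set.PageSet.FromDict is replaced by keyword arguments to the page_set.PageSet constructor, and each URL is added with AddPageWithDefaultRunNavigate. A minimal sketch of the resulting shape (the measurement, benchmark name, archive file and URL below are illustrative, not taken from any existing file):

import os

from telemetry import test
from telemetry.page import page_measurement
from telemetry.page import page_set


class _ExampleMeasurement(page_measurement.PageMeasurement):
  def MeasurePage(self, _, tab, results):
    # Illustrative only: read a score the page is assumed to expose.
    results.Add('Score', 'score', tab.EvaluateJavaScript('window.score'))


class ExampleBenchmark(test.Test):
  """Sketch of a benchmark using the keyword-argument PageSet API."""
  test = _ExampleMeasurement

  def CreatePageSet(self, options):
    ps = page_set.PageSet(
        file_path=os.path.abspath(__file__),
        archive_data_file='../page_sets/data/example.json',
        make_javascript_deterministic=False)
    ps.AddPageWithDefaultRunNavigate('http://example.com/benchmark/')
    return ps
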
diff --git a/tools/perf/benchmarks/canvasmark.py b/tools/perf/benchmarks/canvasmark.py
index 56f3018290..f84610180e 100644
--- a/tools/perf/benchmarks/canvasmark.py
+++ b/tools/perf/benchmarks/canvasmark.py
@@ -15,6 +15,7 @@ from telemetry import test
from telemetry.page import page_measurement
from telemetry.page import page_set
+
class _CanvasMarkMeasurement(page_measurement.PageMeasurement):
def WillNavigateToPage(self, page, tab):
@@ -49,12 +50,10 @@ class CanvasMark(test.Test):
test = _CanvasMarkMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict({
- 'archive_data_file': '../page_sets/data/canvasmark.json',
- 'make_javascript_deterministic': False,
- 'pages': [
- { 'url':
- 'http://www.kevs3d.co.uk/dev/canvasmark/?auto=true'}
- ]
- }, os.path.abspath(__file__))
-
+ ps = page_set.PageSet(
+ file_path=os.path.abspath(__file__),
+ archive_data_file='../page_sets/data/canvasmark.json',
+ make_javascript_deterministic=False)
+ ps.AddPageWithDefaultRunNavigate(
+ 'http://www.kevs3d.co.uk/dev/canvasmark/?auto=true')
+ return ps
diff --git a/tools/perf/benchmarks/dom_perf.py b/tools/perf/benchmarks/dom_perf.py
index d78bfd4560..a20a2542ef 100644
--- a/tools/perf/benchmarks/dom_perf.py
+++ b/tools/perf/benchmarks/dom_perf.py
@@ -80,18 +80,20 @@ class DomPerf(test.Test):
def CreatePageSet(self, options):
dom_perf_dir = os.path.join(util.GetChromiumSrcDir(), 'data', 'dom_perf')
- base_page = 'file://run.html?reportInJS=1&run='
- return page_set.PageSet.FromDict({
- 'pages': [
- { 'url': base_page + 'Accessors' },
- { 'url': base_page + 'CloneNodes' },
- { 'url': base_page + 'CreateNodes' },
- { 'url': base_page + 'DOMDivWalk' },
- { 'url': base_page + 'DOMTable' },
- { 'url': base_page + 'DOMWalk' },
- { 'url': base_page + 'Events' },
- { 'url': base_page + 'Get+Elements' },
- { 'url': base_page + 'GridSort' },
- { 'url': base_page + 'Template' }
- ]
- }, dom_perf_dir)
+ run_params = [
+ 'Accessors',
+ 'CloneNodes',
+ 'CreateNodes',
+ 'DOMDivWalk',
+ 'DOMTable',
+ 'DOMWalk',
+ 'Events',
+ 'Get+Elements',
+ 'GridSort',
+ 'Template'
+ ]
+ ps = page_set.PageSet(file_path=dom_perf_dir)
+ for param in run_params:
+ ps.AddPageWithDefaultRunNavigate(
+ 'file://run.html?reportInJS=1&run=%s' % param)
+ return ps
diff --git a/tools/perf/benchmarks/dromaeo.py b/tools/perf/benchmarks/dromaeo.py
index 7903dfbb5a..f3f11d10e4 100644
--- a/tools/perf/benchmarks/dromaeo.py
+++ b/tools/perf/benchmarks/dromaeo.py
@@ -60,13 +60,12 @@ class _DromaeoBenchmark(test.Test):
# The docstring of benchmark classes may also be used as a description
# when 'run_benchmarks list' is run.
description = self.__doc__ or 'Dromaeo JavaScript Benchmark'
- page_set_dict = {
- 'description': description,
- 'pages': [{'url': url}],
- }
dromaeo_dir = os.path.join(util.GetChromiumSrcDir(),
'chrome', 'test', 'data', 'dromaeo')
- return page_set.PageSet.FromDict(page_set_dict, dromaeo_dir)
+ ps = page_set.PageSet(description=description,
+ file_path=dromaeo_dir)
+ ps.AddPageWithDefaultRunNavigate(url)
+ return ps
class DromaeoDomCoreAttr(_DromaeoBenchmark):
diff --git a/tools/perf/benchmarks/html5gaming.py b/tools/perf/benchmarks/html5gaming.py
index 341d0cdb71..98370b0c6c 100644
--- a/tools/perf/benchmarks/html5gaming.py
+++ b/tools/perf/benchmarks/html5gaming.py
@@ -16,7 +16,6 @@ from telemetry import test
from telemetry.page import page_measurement
from telemetry.page import page_set
-
class _HTML5GamingMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
tab.ExecuteJavaScript('benchmark();')
@@ -33,12 +32,9 @@ class HTML5Gaming(test.Test):
"""Imapct HTML5 smooth running games benchmark suite."""
test = _HTML5GamingMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict({
- 'archive_data_file': '../page_sets/data/html5gaming.json',
- 'make_javascript_deterministic': False,
- 'pages': [
- { 'url':
- 'http://html5-benchmark.com/'}
- ]
- }, os.path.abspath(__file__))
-
+ ps = page_set.PageSet(
+ file_path=os.path.abspath(__file__),
+ archive_data_file='../page_sets/data/html5gaming.json',
+ make_javascript_deterministic=False)
+ ps.AddPageWithDefaultRunNavigate('http://html5-benchmark.com/')
+ return ps
diff --git a/tools/perf/benchmarks/indexeddb_perf.py b/tools/perf/benchmarks/indexeddb_perf.py
index 1d62919722..e969d71b15 100644
--- a/tools/perf/benchmarks/indexeddb_perf.py
+++ b/tools/perf/benchmarks/indexeddb_perf.py
@@ -89,8 +89,6 @@ class IndexedDb(test.Test):
def CreatePageSet(self, options):
indexeddb_dir = os.path.join(util.GetChromiumSrcDir(), 'chrome', 'test',
'data', 'indexeddb')
- return page_set.PageSet.FromDict({
- 'pages': [
- { 'url': 'file://perf_test.html' }
- ]
- }, indexeddb_dir)
+ ps = page_set.PageSet(file_path=indexeddb_dir)
+ ps.AddPageWithDefaultRunNavigate('file://perf_test.html')
+ return ps
diff --git a/tools/perf/benchmarks/jsgamebench.py b/tools/perf/benchmarks/jsgamebench.py
index d91313dbce..c94f754f36 100644
--- a/tools/perf/benchmarks/jsgamebench.py
+++ b/tools/perf/benchmarks/jsgamebench.py
@@ -36,14 +36,14 @@ class _JsgamebenchMeasurement(page_measurement.PageMeasurement):
results.Add('Score', 'score (bigger is better)', result)
+@test.Disabled('linux') # crbug.com/365237
class Jsgamebench(test.Test):
"""Counts how many animating sprites can move around on the screen at once."""
test = _JsgamebenchMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict({
- 'archive_data_file': '../page_sets/data/jsgamebench.json',
- 'pages': [
- { 'url': 'http://localhost/' }
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(
+ archive_data_file='../page_sets/data/jsgamebench.json',
+ file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://localhost/')
+ return ps
diff --git a/tools/perf/benchmarks/kraken.py b/tools/perf/benchmarks/kraken.py
index cc1ba48d91..985325f7d2 100644
--- a/tools/perf/benchmarks/kraken.py
+++ b/tools/perf/benchmarks/kraken.py
@@ -54,9 +54,9 @@ class Kraken(test.Test):
test = _KrakenMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict({
- 'archive_data_file': '../page_sets/data/kraken.json',
- 'pages': [
- { 'url': 'http://krakenbenchmark.mozilla.org/kraken-1.1/driver.html' }
- ]
- }, os.path.abspath(__file__))
+ ps = page_set.PageSet(
+ archive_data_file='../page_sets/data/kraken.json',
+ file_path=os.path.abspath(__file__))
+ ps.AddPageWithDefaultRunNavigate(
+ 'http://krakenbenchmark.mozilla.org/kraken-1.1/driver.html')
+ return ps
diff --git a/tools/perf/benchmarks/maps.py b/tools/perf/benchmarks/maps.py
index 00fccd06d0..0d2c441784 100644
--- a/tools/perf/benchmarks/maps.py
+++ b/tools/perf/benchmarks/maps.py
@@ -11,7 +11,10 @@ import re
from telemetry import test
from telemetry.core import util
from telemetry.page import page_measurement
-from telemetry.page import page_set
+from telemetry.page import page as page_module
+from telemetry.page import page_set as page_set_module
+# pylint: disable=W0401,W0614
+from telemetry.page.actions.all_page_actions import *
class _MapsMeasurement(page_measurement.PageMeasurement):
@@ -24,6 +27,18 @@ class _MapsMeasurement(page_measurement.PageMeasurement):
results.Add('total_time', 'ms', total)
results.Add('render_mean_time', 'ms', render)
+class MapsPage(page_module.Page):
+ def __init__(self, page_set, base_dir):
+ super(MapsPage, self).__init__(
+ url='http://localhost:10020/tracker.html',
+ page_set=page_set,
+ base_dir=base_dir)
+
+ def RunNavigateSteps(self, action_runner):
+ action_runner.Run(NavigateAction())
+ action_runner.Run(WaitAction({'javascript': 'window.testDone'}))
+
+
@test.Disabled
class MapsBenchmark(test.Test):
"""Basic Google Maps benchmarks."""
@@ -32,21 +47,12 @@ class MapsBenchmark(test.Test):
def CreatePageSet(self, options):
page_set_path = os.path.join(
util.GetChromiumSrcDir(), 'tools', 'perf', 'page_sets')
- page_set_dict = {
- 'archive_data_file': 'data/maps.json',
- 'make_javascript_deterministic': False,
- 'pages': [
- {
- 'url': 'http://localhost:10020/tracker.html',
- 'navigate_steps' : [
- { 'action': 'navigate' },
- { 'action': 'wait', 'javascript': 'window.testDone' }
- ]
- }
- ]
- }
-
- return page_set.PageSet.FromDict(page_set_dict, page_set_path)
+ ps = page_set_module.PageSet(
+ archive_data_file='data/maps.json',
+ make_javascript_deterministic=False,
+ file_path=page_set_path)
+ ps.AddPage(MapsPage(ps, ps.base_dir))
+ return ps
class MapsNoVsync(MapsBenchmark):
"""Runs the Google Maps benchmark with Vsync disabled"""
diff --git a/tools/perf/benchmarks/octane.py b/tools/perf/benchmarks/octane.py
index 053f601322..4fc6a3e8d0 100644
--- a/tools/perf/benchmarks/octane.py
+++ b/tools/perf/benchmarks/octane.py
@@ -80,12 +80,10 @@ class Octane(test.Test):
test = _OctaneMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict({
- 'archive_data_file': '../page_sets/data/octane.json',
- 'make_javascript_deterministic': False,
- 'pages': [{
- 'url':
- 'http://octane-benchmark.googlecode.com/svn/latest/index.html?auto=1'
- }
- ]
- }, os.path.abspath(__file__))
+ ps = page_set.PageSet(
+ archive_data_file='../page_sets/data/octane.json',
+ make_javascript_deterministic=False,
+ file_path=os.path.abspath(__file__))
+ ps.AddPageWithDefaultRunNavigate(
+ 'http://octane-benchmark.googlecode.com/svn/latest/index.html?auto=1')
+ return ps
diff --git a/tools/perf/benchmarks/page_cycler.py b/tools/perf/benchmarks/page_cycler.py
index 95692a71a5..4aa97ae7b5 100644
--- a/tools/perf/benchmarks/page_cycler.py
+++ b/tools/perf/benchmarks/page_cycler.py
@@ -61,6 +61,7 @@ class PageCyclerMoz(test.Test):
options = {'pageset_repeat': 10}
+@test.Disabled('win') # crbug.com/353260
class PageCyclerNetsimTop10(test.Test):
"""Measures load time of the top 10 sites under simulated cable network."""
tag = 'netsim'
diff --git a/tools/perf/benchmarks/peacekeeper.py b/tools/perf/benchmarks/peacekeeper.py
index 792259ad59..05f218b84f 100644
--- a/tools/perf/benchmarks/peacekeeper.py
+++ b/tools/perf/benchmarks/peacekeeper.py
@@ -14,8 +14,6 @@ second depending on the test. Final Score is computed by calculating geometric
mean of individual test scores.
"""
-import os
-
from telemetry import test
from telemetry.page import page_measurement
from telemetry.page import page_set
@@ -83,21 +81,16 @@ class PeaceKeeperBenchmark(test.Test):
# The docstring of benchmark classes may also be used as a description
# when 'run_benchmarks list' is run.
description = self.__doc__ or 'PeaceKeeper Benchmark'
- test_urls = []
+ ps = page_set.PageSet(
+ description=description,
+ archive_data_file='../page_sets/data/peacekeeper_%s.json' % self.tag,
+ make_javascript_deterministic=False)
for test_name in self.test_param:
- test_urls.append(
- {"url": ("http://peacekeeper.futuremark.com/run.action?debug=true&"
- "repeat=false&forceSuiteName=%s&forceTestName=%s") %
- (self.tag, test_name)
- })
-
- page_set_dict = {
- 'description': description,
- 'archive_data_file': '../page_sets/data/peacekeeper_%s.json' % self.tag,
- 'make_javascript_deterministic': False,
- 'pages': test_urls,
- }
- return page_set.PageSet.FromDict(page_set_dict, os.path.abspath(__file__))
+ ps.AddPageWithDefaultRunNavigate(
+ ('http://peacekeeper.futuremark.com/run.action?debug=true&'
+ 'repeat=false&forceSuiteName=%s&forceTestName=%s') %
+ (self.tag, test_name))
+ return ps
class PeaceKeeperRender(PeaceKeeperBenchmark):
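
For a concrete sense of what the loop above produces, substituting illustrative values into the format string yields the full run URL:

# Illustrative expansion of the format string with hypothetical values
# tag='render' and test_name='example':
url = ('http://peacekeeper.futuremark.com/run.action?debug=true&'
       'repeat=false&forceSuiteName=%s&forceTestName=%s') % ('render', 'example')
# url == 'http://peacekeeper.futuremark.com/run.action?debug=true&'
#        'repeat=false&forceSuiteName=render&forceTestName=example'
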
diff --git a/tools/perf/benchmarks/pica.py b/tools/perf/benchmarks/pica.py
index 98b316e624..f5cef99eb5 100644
--- a/tools/perf/benchmarks/pica.py
+++ b/tools/perf/benchmarks/pica.py
@@ -12,7 +12,7 @@ class _PicaMeasurement(page_measurement.PageMeasurement):
'--enable-experimental-web-platform-features')
def MeasurePage(self, _, tab, results):
- result = int(tab.EvaluateJavaScript('__pica_load_time'))
+ result = int(tab.EvaluateJavaScript('__polymer_ready_time'))
results.Add('Total', 'ms', result)
diff --git a/tools/perf/benchmarks/polymer_load.py b/tools/perf/benchmarks/polymer_load.py
new file mode 100644
index 0000000000..d062b9f62d
--- /dev/null
+++ b/tools/perf/benchmarks/polymer_load.py
@@ -0,0 +1,13 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from measurements import polymer_load
+from telemetry import test
+
+
+class PolymerLoadPica(test.Test):
+ """Measures time to polymer-ready for PICA
+ """
+ test = polymer_load.PolymerLoadMeasurement
+ page_set = 'page_sets/pica.py'
diff --git a/tools/perf/benchmarks/rasterize_and_record_micro.py b/tools/perf/benchmarks/rasterize_and_record_micro.py
index 9d02b8cc4b..a5e88a3f8a 100644
--- a/tools/perf/benchmarks/rasterize_and_record_micro.py
+++ b/tools/perf/benchmarks/rasterize_and_record_micro.py
@@ -7,6 +7,9 @@ from measurements import rasterize_and_record_micro
from telemetry import test
+# RasterizeAndRecord disabled on mac because of crbug.com/350684.
+# RasterizeAndRecord disabled on windows because of crbug.com/338057.
+@test.Disabled('mac', 'win')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
@@ -15,6 +18,7 @@ class RasterizeAndRecordMicroTop25(test.Test):
page_set = 'page_sets/top_25.py'
+@test.Disabled('mac', 'win')
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
@@ -23,6 +27,7 @@ class RasterizeAndRecordMicroKeyMobileSites(test.Test):
page_set = 'page_sets/key_mobile_sites.py'
+@test.Disabled('mac', 'win')
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
@@ -31,6 +36,7 @@ class RasterizeAndRecordMicroKeySilkCases(test.Test):
page_set = 'page_sets/key_silk_cases.py'
+@test.Disabled('mac', 'win')
class RasterizeAndRecordMicroFastPathKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
@@ -42,3 +48,27 @@ class RasterizeAndRecordMicroFastPathKeySilkCases(test.Test):
page_set = 'page_sets/key_silk_cases.py'
def CustomizeBrowserOptions(self, options):
silk_flags.CustomizeBrowserOptionsForFastPath(options)
+
+
+@test.Disabled('mac', 'win')
+class RasterizeAndRecordMicroFastPathGpuRasterizationKeySilkCases(test.Test):
+ """Measures rasterize and record performance on the silk sites.
+
+ Uses GPU rasterization together with bleeding edge rendering fast paths.
+
+ http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
+ tag = 'fast_path_gpu_rasterization'
+ test = rasterize_and_record_micro.RasterizeAndRecordMicro
+ page_set = 'page_sets/key_silk_cases.py'
+ def CustomizeBrowserOptions(self, options):
+ silk_flags.CustomizeBrowserOptionsForFastPath(options)
+ silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
+
+
+@test.Enabled('android')
+class RasterizeAndRecordMicroPolymer(test.Test):
+ """Measures rasterize and record performance on the Polymer cases.
+
+ http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
+ test = rasterize_and_record_micro.RasterizeAndRecordMicro
+ page_set = 'page_sets/polymer.py'
diff --git a/tools/perf/benchmarks/repaint.py b/tools/perf/benchmarks/repaint.py
index 353fbf997a..0c8aefc69c 100644
--- a/tools/perf/benchmarks/repaint.py
+++ b/tools/perf/benchmarks/repaint.py
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from benchmarks import silk_flags
from measurements import repaint
from telemetry import test
@@ -23,7 +24,4 @@ class RepaintGpuRasterizationKeyMobileSites(test.Test):
test = repaint.Repaint
page_set = 'page_sets/key_mobile_sites.py'
def CustomizeBrowserOptions(self, options):
- options.AppendExtraBrowserArgs('--enable-threaded-compositing')
- options.AppendExtraBrowserArgs('--force-compositing-mode')
- options.AppendExtraBrowserArgs('--enable-impl-side-painting')
- options.AppendExtraBrowserArgs('--force-gpu-rasterization')
+ silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
diff --git a/tools/perf/benchmarks/robohornet_pro.py b/tools/perf/benchmarks/robohornet_pro.py
index da0a7b5d2b..f8a3e38eab 100644
--- a/tools/perf/benchmarks/robohornet_pro.py
+++ b/tools/perf/benchmarks/robohornet_pro.py
@@ -40,12 +40,11 @@ class RobohornetPro(test.Test):
test = _RobohornetProMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict({
- 'archive_data_file': '../page_sets/data/robohornet_pro.json',
- # Measurement require use of real Date.now() for measurement.
- 'make_javascript_deterministic': False,
- 'pages': [
- { 'url':
- 'http://ie.microsoft.com/testdrive/performance/robohornetpro/' }
- ]
- }, os.path.abspath(__file__))
+ ps = page_set.PageSet(
+ archive_data_file='../page_sets/data/robohornet_pro.json',
+      # The measurement requires use of the real Date.now().
+ make_javascript_deterministic=False,
+ file_path=os.path.abspath(__file__))
+ ps.AddPageWithDefaultRunNavigate(
+ 'http://ie.microsoft.com/testdrive/performance/robohornetpro/')
+ return ps
diff --git a/tools/perf/benchmarks/scirra.py b/tools/perf/benchmarks/scirra.py
index 08511cfd98..b3ba56cfab 100644
--- a/tools/perf/benchmarks/scirra.py
+++ b/tools/perf/benchmarks/scirra.py
@@ -54,13 +54,15 @@ class ScirraBenchmark(test.Test):
"""WebGL and Canvas2D rendering benchmark suite."""
test = _ScirraMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict({
- 'archive_data_file': '../page_sets/data/scirra.json',
- 'make_javascript_deterministic': False,
- 'pages': [
- { 'url': 'http://www.scirra.com/labs/renderperf3/'},
- { 'url': 'http://www.scirra.com/demos/c2/renderperfgl/'},
- { 'url': 'http://www.scirra.com/demos/c2/renderperf2d/'}
- ]
- }, os.path.abspath(__file__))
+ ps = page_set.PageSet(
+ archive_data_file='../page_sets/data/scirra.json',
+ make_javascript_deterministic=False,
+ file_path=os.path.abspath(__file__))
+ for url in ('http://www.scirra.com/labs/renderperf3/',
+ 'http://www.scirra.com/demos/c2/renderperfgl/',
+ 'http://www.scirra.com/demos/c2/renderperf2d/'):
+ ps.AddPageWithDefaultRunNavigate(url)
+ return ps
+
+
diff --git a/tools/perf/benchmarks/silk_flags.py b/tools/perf/benchmarks/silk_flags.py
index 5f0122ab3e..dfdd57c13d 100644
--- a/tools/perf/benchmarks/silk_flags.py
+++ b/tools/perf/benchmarks/silk_flags.py
@@ -5,3 +5,11 @@
def CustomizeBrowserOptionsForFastPath(options):
"""Enables flags needed for bleeding edge rendering fast paths."""
options.AppendExtraBrowserArgs('--enable-bleeding-edge-rendering-fast-paths')
+
+
+def CustomizeBrowserOptionsForGpuRasterization(options):
+ """Enables flags needed for forced GPU rasterization using Ganesh."""
+ options.AppendExtraBrowserArgs('--enable-threaded-compositing')
+ options.AppendExtraBrowserArgs('--force-compositing-mode')
+ options.AppendExtraBrowserArgs('--enable-impl-side-painting')
+ options.AppendExtraBrowserArgs('--force-gpu-rasterization')
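
A minimal sketch of how a benchmark is expected to consume the new helper in place of the four hand-written AppendExtraBrowserArgs calls (the class name and page set path are illustrative; the imports follow the pattern used by benchmarks/smoothness.py):

from benchmarks import silk_flags
from measurements import smoothness
from telemetry import test


class ExampleGpuRasterizationBenchmark(test.Test):
  """Sketch: opts into forced GPU rasterization via the shared helper."""
  tag = 'gpu_rasterization'
  test = smoothness.Smoothness
  page_set = 'page_sets/key_mobile_sites.py'

  def CustomizeBrowserOptions(self, options):
    # Enables threaded compositing, impl-side painting and forced GPU
    # rasterization through the shared silk_flags helper.
    silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
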
diff --git a/tools/perf/benchmarks/smoothness.py b/tools/perf/benchmarks/smoothness.py
index f0e2ed7608..f4fa52dda3 100644
--- a/tools/perf/benchmarks/smoothness.py
+++ b/tools/perf/benchmarks/smoothness.py
@@ -71,10 +71,7 @@ class SmoothnessGpuRasterizationTop25(test.Test):
test = smoothness.Smoothness
page_set = 'page_sets/top_25.py'
def CustomizeBrowserOptions(self, options):
- options.AppendExtraBrowserArgs('--enable-threaded-compositing')
- options.AppendExtraBrowserArgs('--force-compositing-mode')
- options.AppendExtraBrowserArgs('--enable-impl-side-painting')
- options.AppendExtraBrowserArgs('--force-gpu-rasterization')
+ silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
class SmoothnessGpuRasterizationKeyMobileSites(test.Test):
@@ -85,10 +82,7 @@ class SmoothnessGpuRasterizationKeyMobileSites(test.Test):
test = smoothness.Smoothness
page_set = 'page_sets/key_mobile_sites.py'
def CustomizeBrowserOptions(self, options):
- options.AppendExtraBrowserArgs('--enable-threaded-compositing')
- options.AppendExtraBrowserArgs('--force-compositing-mode')
- options.AppendExtraBrowserArgs('--enable-impl-side-painting')
- options.AppendExtraBrowserArgs('--force-gpu-rasterization')
+ silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
class SmoothnessGpuRasterizationKeySilkCases(test.Test):
@@ -98,10 +92,7 @@ class SmoothnessGpuRasterizationKeySilkCases(test.Test):
test = smoothness.Smoothness
page_set = 'page_sets/key_silk_cases.py'
def CustomizeBrowserOptions(self, options):
- options.AppendExtraBrowserArgs('--enable-threaded-compositing')
- options.AppendExtraBrowserArgs('--force-compositing-mode')
- options.AppendExtraBrowserArgs('--enable-impl-side-painting')
- options.AppendExtraBrowserArgs('--force-gpu-rasterization')
+ silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
class SmoothnessFastPathGpuRasterizationKeySilkCases(
@@ -125,3 +116,49 @@ class SmoothnessToughPinchZoomCases(test.Test):
"""
test = smoothness.Smoothness
page_set = 'page_sets/tough_pinch_zoom_cases.py'
+
+
+@test.Enabled('android')
+class SmoothnessPolymer(test.Test):
+ """Measures rendering statistics for Polymer cases.
+ """
+ test = smoothness.Smoothness
+ page_set = 'page_sets/polymer.py'
+
+
+@test.Enabled('android')
+class SmoothnessFastPathPolymer(test.Test):
+  """Measures rendering statistics for the Polymer cases using bleeding edge
+  rendering fast paths, without GPU rasterization.
+ """
+ tag = 'fast_path'
+ test = smoothness.Smoothness
+ page_set = 'page_sets/polymer.py'
+ def CustomizeBrowserOptions(self, options):
+ silk_flags.CustomizeBrowserOptionsForFastPath(options)
+
+
+@test.Enabled('android')
+class SmoothnessGpuRasterizationPolymer(test.Test):
+ """Measures rendering statistics for the Polymer cases with GPU rasterization
+ """
+ tag = 'gpu_rasterization'
+ test = smoothness.Smoothness
+ page_set = 'page_sets/polymer.py'
+ def CustomizeBrowserOptions(self, options):
+ silk_flags.CustomizeBrowserOptionsForGpuRasterization(options)
+
+
+@test.Enabled('android')
+class SmoothnessFastPathGpuRasterizationPolymer(
+ SmoothnessGpuRasterizationPolymer):
+ """Measures rendering statistics for the Polymer cases with GPU rasterization
+ using bleeding edge rendering fast paths.
+ """
+ tag = 'fast_path_gpu_rasterization'
+ test = smoothness.Smoothness
+ page_set = 'page_sets/polymer.py'
+ def CustomizeBrowserOptions(self, options):
+ super(SmoothnessFastPathGpuRasterizationPolymer, self). \
+ CustomizeBrowserOptions(options)
+ silk_flags.CustomizeBrowserOptionsForFastPath(options)
diff --git a/tools/perf/benchmarks/spaceport.py b/tools/perf/benchmarks/spaceport.py
index 4eca5f96be..f4cc522af6 100644
--- a/tools/perf/benchmarks/spaceport.py
+++ b/tools/perf/benchmarks/spaceport.py
@@ -73,6 +73,6 @@ class Spaceport(test.Test):
def CreatePageSet(self, options):
spaceport_dir = os.path.join(util.GetChromiumSrcDir(), 'chrome', 'test',
'data', 'third_party', 'spaceport')
- return page_set.PageSet.FromDict(
- {'pages': [{'url': 'file://index.html'}]},
- spaceport_dir)
+ ps = page_set.PageSet(file_path=spaceport_dir)
+ ps.AddPageWithDefaultRunNavigate('file://index.html')
+ return ps
diff --git a/tools/perf/benchmarks/sunspider.py b/tools/perf/benchmarks/sunspider.py
index 9b1b881fd9..a7ce2b0e87 100644
--- a/tools/perf/benchmarks/sunspider.py
+++ b/tools/perf/benchmarks/sunspider.py
@@ -56,9 +56,9 @@ class Sunspider(test.Test):
test = _SunspiderMeasurement
def CreatePageSet(self, options):
- return page_set.PageSet.FromDict(
- {
- 'archive_data_file': '../page_sets/data/sunspider.json',
- 'make_javascript_deterministic': False,
- 'pages': [{ 'url': _URL }],
- }, os.path.abspath(__file__))
+ ps = page_set.PageSet(
+ archive_data_file='../page_sets/data/sunspider.json',
+ make_javascript_deterministic=False,
+ file_path=os.path.abspath(__file__))
+ ps.AddPageWithDefaultRunNavigate(_URL)
+ return ps
diff --git a/tools/perf/benchmarks/thread_times.py b/tools/perf/benchmarks/thread_times.py
index 66c7d4d736..a1fcd53962 100644
--- a/tools/perf/benchmarks/thread_times.py
+++ b/tools/perf/benchmarks/thread_times.py
@@ -45,3 +45,12 @@ class LegacyFastPathBenchmark(ThreadTimesFastPathMobileSites):
@classmethod
def GetName(cls):
return "fast_path.key_mobile_sites"
+
+
+@test.Enabled('android')
+class ThreadTimesPolymer(test.Test):
+ """Measures timeline metrics while performing smoothness action on
+ Polymer cases."""
+ test = thread_times.ThreadTimes
+ page_set = "page_sets/polymer.py"
+ options = { 'report_silk_results': True } \ No newline at end of file
diff --git a/tools/perf/measurements/OWNERS b/tools/perf/measurements/OWNERS
new file mode 100644
index 0000000000..b4a4e10f63
--- /dev/null
+++ b/tools/perf/measurements/OWNERS
@@ -0,0 +1,3 @@
+per-file *chrome_proxy*=bengr@chromium.org
+per-file *chrome_proxy*=bolian@chromium.org
+per-file *chrome_proxy*=marq@chromium.org
diff --git a/tools/perf/measurements/polymer_load.py b/tools/perf/measurements/polymer_load.py
new file mode 100644
index 0000000000..d7edf3d395
--- /dev/null
+++ b/tools/perf/measurements/polymer_load.py
@@ -0,0 +1,37 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page.actions.all_page_actions import NavigateAction, WaitAction
+from telemetry.page import page
+from telemetry.page import page_measurement
+
+
+class PageForPolymerLoad(page.PageWithDefaultRunNavigate):
+
+ def __init__(self, url, page_set):
+ super(PageForPolymerLoad, self).__init__(
+ url=url,
+ page_set=page_set)
+ self.script_to_evaluate_on_commit = '''
+ document.addEventListener("polymer-ready", function() {
+ var unused = document.body.offsetHeight;
+ window.__polymer_ready_time = performance.now();
+ setTimeout(function() {
+ window.__polymer_ready = true;
+ }, 1000);
+ })
+ '''
+
+ def RunNavigateSteps(self, action_runner):
+ action_runner.RunAction(NavigateAction())
+ action_runner.RunAction(WaitAction(
+ {
+ 'javascript': "window.__polymer_ready"
+ }))
+
+
+class PolymerLoadMeasurement(page_measurement.PageMeasurement):
+ def MeasurePage(self, _, tab, results):
+ result = int(tab.EvaluateJavaScript('__polymer_ready_time'))
+ results.Add('Total', 'ms', result)
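
The measurement reads window.__polymer_ready_time, which the script injected on commit records when the polymer-ready event fires, so a benchmark only needs to pair PolymerLoadMeasurement with a page set built from PageForPolymerLoad pages. A minimal sketch of that wiring (the page set class name is illustrative; the URL is the one recorded in data/polymer.json):

import os

from measurements import polymer_load
from telemetry.page import page_set as page_set_module


class ExamplePolymerPageSet(page_set_module.PageSet):
  """Sketch of a page set feeding polymer_load.PolymerLoadMeasurement."""

  def __init__(self):
    super(ExamplePolymerPageSet, self).__init__(
        file_path=os.path.dirname(__file__),
        archive_data_file='data/polymer.json',
        make_javascript_deterministic=False)
    # Each page installs the polymer-ready listener on commit and waits for
    # window.__polymer_ready in its navigate steps.
    self.AddPage(polymer_load.PageForPolymerLoad(
        'http://localhost:8000/components/paper-calculator/demo.html', self))
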
diff --git a/tools/perf/measurements/rasterize_and_record_micro.py b/tools/perf/measurements/rasterize_and_record_micro.py
index aa0bfd0edf..d33423cd31 100644
--- a/tools/perf/measurements/rasterize_and_record_micro.py
+++ b/tools/perf/measurements/rasterize_and_record_micro.py
@@ -106,10 +106,12 @@ class RasterizeAndRecordMicro(page_measurement.PageMeasurement):
sys.platform == 'android'):
record_time_sk_null_canvas = data['record_time_sk_null_canvas_ms']
record_time_painting_disabled = data['record_time_painting_disabled_ms']
+ record_time_skrecord = data['record_time_skrecord_ms']
results.Add('record_time_sk_null_canvas', 'ms',
record_time_sk_null_canvas)
results.Add('record_time_painting_disabled', 'ms',
record_time_painting_disabled)
+ results.Add('record_time_skrecord', 'ms', record_time_skrecord)
if self.options.report_detailed_results:
pixels_rasterized_with_non_solid_color = \
diff --git a/tools/perf/measurements/rasterize_and_record_micro_unittest.py b/tools/perf/measurements/rasterize_and_record_micro_unittest.py
index b30e80d7e1..65610fa2d1 100644
--- a/tools/perf/measurements/rasterize_and_record_micro_unittest.py
+++ b/tools/perf/measurements/rasterize_and_record_micro_unittest.py
@@ -5,6 +5,7 @@
import logging
from measurements import rasterize_and_record_micro
+from telemetry import test
from telemetry.core import wpr_modes
from telemetry.page import page_measurement_unittest_base
from telemetry.page import page_test
@@ -28,6 +29,7 @@ class RasterizeAndRecordMicroUnitTest(
self._options.start_wait_time = 0.0
self._options.report_detailed_results = True
+ @test.Disabled('win')
def testRasterizeAndRecordMicro(self):
ps = self.CreatePageSetFromFileInUnittestDataDir('blank.html')
measurement = rasterize_and_record_micro.RasterizeAndRecordMicro()
diff --git a/tools/perf/measurements/repaint_unittest.py b/tools/perf/measurements/repaint_unittest.py
index 85a53130c8..d752d40477 100644
--- a/tools/perf/measurements/repaint_unittest.py
+++ b/tools/perf/measurements/repaint_unittest.py
@@ -6,9 +6,21 @@ from measurements import repaint
from telemetry import test
from telemetry.core import wpr_modes
from telemetry.page import page_measurement_unittest_base
+from telemetry.page import page as page_module
+# pylint: disable=W0401,W0614
+from telemetry.page.actions.all_page_actions import *
from telemetry.unittest import options_for_unittests
+class TestRepaintPage(page_module.PageWithDefaultRunNavigate):
+ def __init__(self, page_set, base_dir):
+ super(TestRepaintPage, self).__init__('file://blank.html',
+ page_set, base_dir)
+
+ def RunRepaint(self, action_runner):
+ action_runner.RunAction(RepaintContinuouslyAction({'seconds': 2}))
+
+
class RepaintUnitTest(
page_measurement_unittest_base.PageMeasurementUnitTestBase):
"""Smoke test for repaint measurement
@@ -24,7 +36,8 @@ class RepaintUnitTest(
@test.Disabled('android')
def testRepaint(self):
- ps = self.CreatePageSetFromFileInUnittestDataDir('blank.html')
+ ps = self.CreateEmptyPageSet()
+ ps.AddPage(TestRepaintPage(ps, ps.base_dir))
measurement = repaint.Repaint()
results = self.RunMeasurement(measurement, ps, options=self._options)
self.assertEquals(0, len(results.failures))
diff --git a/tools/perf/measurements/smooth_gesture_util.py b/tools/perf/measurements/smooth_gesture_util.py
index ebd7fbd195..1331c8c92d 100644
--- a/tools/perf/measurements/smooth_gesture_util.py
+++ b/tools/perf/measurements/smooth_gesture_util.py
@@ -17,6 +17,9 @@ def GetAdjustedInteractionIfContainGesture(timeline, interaction_record):
the browser and renderer process submitting the trace events for the
markers.
"""
+ # Only adjust the range for gestures.
+ if not interaction_record.logical_name.startswith('Gesture_'):
+ return copy.copy(interaction_record)
gesture_events = [
ev for ev
in timeline.GetAllEventsOfName('SyntheticGestureController::running', True)
diff --git a/tools/perf/measurements/smooth_gesture_util_unittest.py b/tools/perf/measurements/smooth_gesture_util_unittest.py
index a825dce18c..d22d011840 100644
--- a/tools/perf/measurements/smooth_gesture_util_unittest.py
+++ b/tools/perf/measurements/smooth_gesture_util_unittest.py
@@ -14,7 +14,8 @@ class SmoothGestureUtilTest(unittest.TestCase):
renderer_main.name = 'CrRendererMain'
# [ X ] [ Y ]
- # [ record_1 ]
+ # [ record_1]
+ # [ record_6]
# [ record_2 ] [ record_3 ]
# [ record_4 ]
# [ record_5 ]
@@ -25,11 +26,16 @@ class SmoothGestureUtilTest(unittest.TestCase):
model.FinalizeImport(shift_world_to_zero=False)
- record_1 = tir_module.TimelineInteractionRecord('included', 15, 25)
- record_2 = tir_module.TimelineInteractionRecord('overlapped_left', 5, 25)
- record_3 = tir_module.TimelineInteractionRecord('overlapped_right', 25, 35)
- record_4 = tir_module.TimelineInteractionRecord('containing', 5, 35)
- record_5 = tir_module.TimelineInteractionRecord('non_overlapped', 35, 45)
+ record_1 = tir_module.TimelineInteractionRecord('Gesture_included', 15, 25)
+ record_2 = tir_module.TimelineInteractionRecord(
+ 'Gesture_overlapped_left', 5, 25)
+ record_3 = tir_module.TimelineInteractionRecord(
+ 'Gesture_overlapped_right', 25, 35)
+ record_4 = tir_module.TimelineInteractionRecord(
+ 'Gesture_containing', 5, 35)
+ record_5 = tir_module.TimelineInteractionRecord(
+ 'Gesture_non_overlapped', 35, 45)
+ record_6 = tir_module.TimelineInteractionRecord('Action_included', 15, 25)
adjusted_record_1 = sg_util.GetAdjustedInteractionIfContainGesture(
model, record_1)
@@ -57,3 +63,10 @@ class SmoothGestureUtilTest(unittest.TestCase):
self.assertEquals(adjusted_record_5.start, 35)
self.assertEquals(adjusted_record_5.end, 45)
self.assertTrue(adjusted_record_5 is not record_5)
+
+ adjusted_record_6 = sg_util.GetAdjustedInteractionIfContainGesture(
+ model, record_6)
+ self.assertEquals(adjusted_record_6.start, 15)
+ self.assertEquals(adjusted_record_6.end, 25)
+ self.assertTrue(adjusted_record_6 is not record_6)
+
diff --git a/tools/perf/measurements/smoothness.py b/tools/perf/measurements/smoothness.py
index 07ab00b32e..e9cdcc2747 100644
--- a/tools/perf/measurements/smoothness.py
+++ b/tools/perf/measurements/smoothness.py
@@ -15,6 +15,7 @@ class Smoothness(page_measurement.PageMeasurement):
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
+ options.AppendExtraBrowserArgs('--touch-events=enabled')
power.PowerMetric.CustomizeBrowserOptions(options)
def WillRunActions(self, page, tab):
diff --git a/tools/perf/measurements/smoothness_unittest.py b/tools/perf/measurements/smoothness_unittest.py
index a335486c3f..7722a5cb49 100644
--- a/tools/perf/measurements/smoothness_unittest.py
+++ b/tools/perf/measurements/smoothness_unittest.py
@@ -45,7 +45,7 @@ class SmoothnessUnitTest(
test_page.synthetic_delays = {
'cc.BeginMainFrame': { 'target_duration': 0.012 },
'cc.DrawAndSwap': { 'target_duration': 0.012, 'mode': 'alternating' },
- 'gpu.SwapBuffers': { 'target_duration': 0.012 }
+ 'gpu.PresentingFrame': { 'target_duration': 0.012 }
}
tab = FakeTab()
@@ -55,7 +55,7 @@ class SmoothnessUnitTest(
expected_category_filter = [
'DELAY(cc.BeginMainFrame;0.012000;static)',
'DELAY(cc.DrawAndSwap;0.012000;alternating)',
- 'DELAY(gpu.SwapBuffers;0.012000;static)',
+ 'DELAY(gpu.PresentingFrame;0.012000;static)',
'benchmark',
'webkit.console'
]
diff --git a/tools/perf/measurements/timeline_based_measurement_unittest.py b/tools/perf/measurements/timeline_based_measurement_unittest.py
index 66b6c17405..eec9adbb48 100644
--- a/tools/perf/measurements/timeline_based_measurement_unittest.py
+++ b/tools/perf/measurements/timeline_based_measurement_unittest.py
@@ -7,6 +7,7 @@ import unittest
from measurements import timeline_based_measurement as tbm_module
from metrics import timeline_based_metric
+from telemetry import test
from telemetry.core import wpr_modes
from telemetry.core.timeline import model as model_module
from telemetry.core.timeline import async_slice
@@ -77,13 +78,9 @@ class TimelineBasedMetricsTests(unittest.TestCase):
metric = tbm_module._TimelineBasedMetrics( # pylint: disable=W0212
self.model, self.renderer_thread,
CreateMetricsForTimelineInteractionRecord)
- ps = page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.bar.com/"}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://www.bar.com/')
+
results.WillMeasurePage(ps.pages[0])
metric.AddResults(results)
results.DidMeasurePage()
@@ -100,6 +97,8 @@ class TimelineBasedMeasurementTest(
self._options = options_for_unittests.GetCopy()
self._options.browser_options.wpr_mode = wpr_modes.WPR_OFF
+ # Disabled due to flakiness: crbug.com/368386
+ @test.Disabled
def testTimelineBasedForSmoke(self):
ps = self.CreatePageSetFromFileInUnittestDataDir(
'interaction_enabled_page.html')
diff --git a/tools/perf/metrics/OWNERS b/tools/perf/metrics/OWNERS
new file mode 100644
index 0000000000..b4a4e10f63
--- /dev/null
+++ b/tools/perf/metrics/OWNERS
@@ -0,0 +1,3 @@
+per-file *chrome_proxy*=bengr@chromium.org
+per-file *chrome_proxy*=bolian@chromium.org
+per-file *chrome_proxy*=marq@chromium.org
diff --git a/tools/perf/metrics/rendering_stats.py b/tools/perf/metrics/rendering_stats.py
index ba987327ef..e35699ac24 100644
--- a/tools/perf/metrics/rendering_stats.py
+++ b/tools/perf/metrics/rendering_stats.py
@@ -3,6 +3,7 @@
# found in the LICENSE file.
from operator import attrgetter
+from telemetry.page import page_measurement
# These are LatencyInfo component names indicating the various components
# that the input event has travelled through.
@@ -16,6 +17,19 @@ BEGIN_COMP_NAME = 'INPUT_EVENT_LATENCY_BEGIN_RWH_COMPONENT'
END_COMP_NAME = 'INPUT_EVENT_LATENCY_TERMINATED_FRAME_SWAP_COMPONENT'
+class NotEnoughFramesError(page_measurement.MeasurementFailure):
+ def __init__(self, frame_count):
+ super(NotEnoughFramesError, self).__init__(
+ 'Only %i frame timestamps were collected ' % frame_count +
+ '(at least two are required).\n'
+ 'Issues that have caused this in the past:\n' +
+ '- Browser bugs that prevents the page from redrawing\n' +
+ '- Bugs in the synthetic gesture code\n' +
+ '- Page and benchmark out of sync (e.g. clicked element was renamed)\n' +
+ '- Pages that render extremely slow\n' +
+ '- Pages that can\'t be scrolled')
+
+
def GetScrollInputLatencyEvents(scroll_type, browser_process, timeline_range):
"""Get scroll events' LatencyInfo from the browser process's trace buffer
that are within the timeline_range.
@@ -119,9 +133,9 @@ class RenderingStats(object):
assert(len(timeline_ranges) > 0)
# Find the top level process with rendering stats (browser or renderer).
if HasRenderingStats(browser_process):
- self.top_level_process = browser_process
+ timestamp_process = browser_process
else:
- self.top_level_process = renderer_process
+ timestamp_process = renderer_process
self.frame_timestamps = []
self.frame_times = []
@@ -156,11 +170,21 @@ class RenderingStats(object):
if timeline_range.is_empty:
continue
- self.initMainThreadStatsFromTimeline(timeline_range)
- self.initImplThreadStatsFromTimeline(timeline_range)
- self.initScrollLatencyStatsFromTimeline(browser_process, timeline_range)
-
- def initScrollLatencyStatsFromTimeline(self, browser_process, timeline_range):
+ self._InitFrameTimestampsFromTimeline(timestamp_process, timeline_range)
+ self._InitMainThreadRenderingStatsFromTimeline(
+ renderer_process, timeline_range)
+ self._InitImplThreadRenderingStatsFromTimeline(
+ renderer_process, timeline_range)
+ self._InitScrollLatencyStatsFromTimeline(browser_process, timeline_range)
+
+ # Check if we have collected at least 2 frames in every range. Otherwise we
+ # can't compute any meaningful metrics.
+ for segment in self.frame_timestamps:
+ if len(segment) < 2:
+ raise NotEnoughFramesError(len(segment))
+
+ def _InitScrollLatencyStatsFromTimeline(
+ self, browser_process, timeline_range):
mouse_wheel_events = GetScrollInputLatencyEvents(
"MouseWheel", browser_process, timeline_range)
self.mouse_wheel_scroll_latency = ComputeMouseWheelScrollLatency(
@@ -175,28 +199,40 @@ class RenderingStats(object):
self.js_touch_scroll_latency = ComputeTouchScrollLatency(
js_touch_scroll_events)
- def initMainThreadStatsFromTimeline(self, timeline_range):
- event_name = 'BenchmarkInstrumentation::MainThreadRenderingStats'
+ def _GatherEvents(self, event_name, process, timeline_range):
events = []
- for event in self.top_level_process.IterAllSlicesOfName(event_name):
+ for event in process.IterAllSlicesOfName(event_name):
if event.start >= timeline_range.min and event.end <= timeline_range.max:
if 'data' not in event.args:
continue
events.append(event)
events.sort(key=attrgetter('start'))
+ return events
+
+ def _AddFrameTimestamp(self, event):
+ frame_count = event.args['data']['frame_count']
+ if frame_count > 1:
+ raise ValueError, 'trace contains multi-frame render stats'
+ if frame_count == 1:
+ self.frame_timestamps[-1].append(
+ event.start)
+ if len(self.frame_timestamps[-1]) >= 2:
+ self.frame_times[-1].append(round(self.frame_timestamps[-1][-1] -
+ self.frame_timestamps[-1][-2], 2))
+
+ def _InitFrameTimestampsFromTimeline(self, process, timeline_range):
+ event_name = 'BenchmarkInstrumentation::MainThreadRenderingStats'
+ for event in self._GatherEvents(event_name, process, timeline_range):
+ self._AddFrameTimestamp(event)
+
+ event_name = 'BenchmarkInstrumentation::ImplThreadRenderingStats'
+ for event in self._GatherEvents(event_name, process, timeline_range):
+ self._AddFrameTimestamp(event)
+
- first_frame = True
- for event in events:
- frame_count = event.args['data']['frame_count']
- if frame_count > 1:
- raise ValueError, 'trace contains multi-frame render stats'
- if frame_count == 1:
- self.frame_timestamps[-1].append(
- event.start)
- if not first_frame:
- self.frame_times[-1].append(round(self.frame_timestamps[-1][-1] -
- self.frame_timestamps[-1][-2], 2))
- first_frame = False
+ def _InitMainThreadRenderingStatsFromTimeline(self, process, timeline_range):
+ event_name = 'BenchmarkInstrumentation::MainThreadRenderingStats'
+ for event in self._GatherEvents(event_name, process, timeline_range):
self.paint_times[-1].append(1000.0 *
event.args['data']['paint_time'])
self.painted_pixel_counts[-1].append(
@@ -206,28 +242,9 @@ class RenderingStats(object):
self.recorded_pixel_counts[-1].append(
event.args['data']['recorded_pixel_count'])
- def initImplThreadStatsFromTimeline(self, timeline_range):
+ def _InitImplThreadRenderingStatsFromTimeline(self, process, timeline_range):
event_name = 'BenchmarkInstrumentation::ImplThreadRenderingStats'
- events = []
- for event in self.top_level_process.IterAllSlicesOfName(event_name):
- if event.start >= timeline_range.min and event.end <= timeline_range.max:
- if 'data' not in event.args:
- continue
- events.append(event)
- events.sort(key=attrgetter('start'))
-
- first_frame = True
- for event in events:
- frame_count = event.args['data']['frame_count']
- if frame_count > 1:
- raise ValueError, 'trace contains multi-frame render stats'
- if frame_count == 1:
- self.frame_timestamps[-1].append(
- event.start)
- if not first_frame:
- self.frame_times[-1].append(round(self.frame_timestamps[-1][-1] -
- self.frame_timestamps[-1][-2], 2))
- first_frame = False
+ for event in self._GatherEvents(event_name, process, timeline_range):
self.rasterize_times[-1].append(1000.0 *
event.args['data']['rasterize_time'])
self.rasterized_pixel_counts[-1].append(
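
The refactored _AddFrameTimestamp keeps the previous semantics: within each range, every frame timestamp after the first contributes one frame time, computed as the difference from the preceding timestamp rounded to two decimals. A small worked example (the timestamp values are illustrative):

# Three timestamps in one range yield two frame times.
frame_timestamps = [[100.0, 116.671, 133.337]]
frame_times = [[]]
for i in xrange(1, len(frame_timestamps[-1])):
  frame_times[-1].append(
      round(frame_timestamps[-1][i] - frame_timestamps[-1][i - 1], 2))
# frame_times is now [[16.67, 16.67]]
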
diff --git a/tools/perf/metrics/rendering_stats_unittest.py b/tools/perf/metrics/rendering_stats_unittest.py
index bedeecaa87..10419ca0c3 100644
--- a/tools/perf/metrics/rendering_stats_unittest.py
+++ b/tools/perf/metrics/rendering_stats_unittest.py
@@ -11,6 +11,7 @@ from metrics.rendering_stats import ComputeMouseWheelScrollLatency
from metrics.rendering_stats import ComputeTouchScrollLatency
from metrics.rendering_stats import HasRenderingStats
from metrics.rendering_stats import RenderingStats
+from metrics.rendering_stats import NotEnoughFramesError
import telemetry.core.timeline.bounds as timeline_bounds
from telemetry.core.timeline import model
import telemetry.core.timeline.async_slice as tracing_async_slice
@@ -224,6 +225,46 @@ class RenderingStatsUnitTest(unittest.TestCase):
process_with_frames.FinalizeImport()
self.assertTrue(HasRenderingStats(thread_with_frames))
+ def testRangeWithoutFrames(self):
+ timer = MockTimer()
+ timeline = model.TimelineModel()
+
+ # Create a renderer process, with a main thread and impl thread.
+ renderer = timeline.GetOrCreateProcess(pid = 2)
+ renderer_main = renderer.GetOrCreateThread(tid = 21)
+ renderer_compositor = renderer.GetOrCreateThread(tid = 22)
+
+ # Create 10 main and impl rendering stats events for Action A.
+ timer.Advance(2, 4)
+ renderer_main.BeginSlice('webkit.console', 'ActionA', timer.Get(), '')
+ for i in xrange(0, 10):
+ first = (i == 0)
+ AddMainThreadRenderingStats(timer, renderer_main, first, None)
+ AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
+ timer.Advance(2, 4)
+ renderer_main.EndSlice(timer.Get())
+
+ # Create 5 main and impl rendering stats events not within any action.
+ for i in xrange(0, 5):
+ first = (i == 0)
+ AddMainThreadRenderingStats(timer, renderer_main, first, None)
+ AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
+
+ # Create Action B without any frames. This should trigger
+ # NotEnoughFramesError when the RenderingStats object is created.
+ timer.Advance(2, 4)
+ renderer_main.BeginSlice('webkit.console', 'ActionB', timer.Get(), '')
+ timer.Advance(2, 4)
+ renderer_main.EndSlice(timer.Get())
+
+ renderer.FinalizeImport()
+
+ timeline_markers = timeline.FindTimelineMarkers(['ActionA', 'ActionB'])
+ timeline_ranges = [ timeline_bounds.Bounds.CreateFromEvent(marker)
+ for marker in timeline_markers ]
+ self.assertRaises(NotEnoughFramesError, RenderingStats,
+ renderer, None, timeline_ranges)
+
def testFromTimeline(self):
timeline = model.TimelineModel()
@@ -237,18 +278,24 @@ class RenderingStatsUnitTest(unittest.TestCase):
renderer_compositor = renderer.GetOrCreateThread(tid = 22)
timer = MockTimer()
- ref_stats = ReferenceRenderingStats()
+ renderer_ref_stats = ReferenceRenderingStats()
+ browser_ref_stats = ReferenceRenderingStats()
# Create 10 main and impl rendering stats events for Action A.
timer.Advance(2, 4)
renderer_main.BeginSlice('webkit.console', 'ActionA', timer.Get(), '')
- ref_stats.AppendNewRange()
+ renderer_ref_stats.AppendNewRange()
+ browser_ref_stats.AppendNewRange()
for i in xrange(0, 10):
first = (i == 0)
- AddMainThreadRenderingStats(timer, renderer_main, first, None)
- AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
- AddMainThreadRenderingStats(timer, browser_main, first, ref_stats)
- AddImplThreadRenderingStats(timer, browser_compositor, first, ref_stats)
+ AddMainThreadRenderingStats(
+ timer, renderer_main, first, renderer_ref_stats)
+ AddImplThreadRenderingStats(
+ timer, renderer_compositor, first, renderer_ref_stats)
+ AddMainThreadRenderingStats(
+ timer, browser_main, first, browser_ref_stats)
+ AddImplThreadRenderingStats(
+ timer, browser_compositor, first, browser_ref_stats)
timer.Advance(2, 4)
renderer_main.EndSlice(timer.Get())
@@ -263,26 +310,36 @@ class RenderingStatsUnitTest(unittest.TestCase):
# Create 10 main and impl rendering stats events for Action B.
timer.Advance(2, 4)
renderer_main.BeginSlice('webkit.console', 'ActionB', timer.Get(), '')
- ref_stats.AppendNewRange()
+ renderer_ref_stats.AppendNewRange()
+ browser_ref_stats.AppendNewRange()
for i in xrange(0, 10):
first = (i == 0)
- AddMainThreadRenderingStats(timer, renderer_main, first, None)
- AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
- AddMainThreadRenderingStats(timer, browser_main, first, ref_stats)
- AddImplThreadRenderingStats(timer, browser_compositor, first, ref_stats)
+ AddMainThreadRenderingStats(
+ timer, renderer_main, first, renderer_ref_stats)
+ AddImplThreadRenderingStats(
+ timer, renderer_compositor, first, renderer_ref_stats)
+ AddMainThreadRenderingStats(
+ timer, browser_main, first, browser_ref_stats)
+ AddImplThreadRenderingStats(
+ timer, browser_compositor, first, browser_ref_stats)
timer.Advance(2, 4)
renderer_main.EndSlice(timer.Get())
# Create 10 main and impl rendering stats events for Action A.
timer.Advance(2, 4)
renderer_main.BeginSlice('webkit.console', 'ActionA', timer.Get(), '')
- ref_stats.AppendNewRange()
+ renderer_ref_stats.AppendNewRange()
+ browser_ref_stats.AppendNewRange()
for i in xrange(0, 10):
first = (i == 0)
- AddMainThreadRenderingStats(timer, renderer_main, first, None)
- AddImplThreadRenderingStats(timer, renderer_compositor, first, None)
- AddMainThreadRenderingStats(timer, browser_main, first, ref_stats)
- AddImplThreadRenderingStats(timer, browser_compositor, first, ref_stats)
+ AddMainThreadRenderingStats(
+ timer, renderer_main, first, renderer_ref_stats)
+ AddImplThreadRenderingStats(
+ timer, renderer_compositor, first, renderer_ref_stats)
+ AddMainThreadRenderingStats(
+ timer, browser_main, first, browser_ref_stats)
+ AddImplThreadRenderingStats(
+ timer, browser_compositor, first, browser_ref_stats)
timer.Advance(2, 4)
renderer_main.EndSlice(timer.Get())
@@ -295,21 +352,19 @@ class RenderingStatsUnitTest(unittest.TestCase):
for marker in timeline_markers ]
stats = RenderingStats(renderer, browser, timeline_ranges)
- # Check if we are using the browser compositor's stats
- self.assertEquals(stats.top_level_process, browser)
-
# Compare rendering stats to reference.
- self.assertEquals(stats.frame_timestamps, ref_stats.frame_timestamps)
- self.assertEquals(stats.frame_times, ref_stats.frame_times)
- self.assertEquals(stats.rasterize_times, ref_stats.rasterize_times)
+ self.assertEquals(stats.frame_timestamps,
+ browser_ref_stats.frame_timestamps)
+ self.assertEquals(stats.frame_times, browser_ref_stats.frame_times)
+ self.assertEquals(stats.rasterize_times, renderer_ref_stats.rasterize_times)
self.assertEquals(stats.rasterized_pixel_counts,
- ref_stats.rasterized_pixel_counts)
- self.assertEquals(stats.paint_times, ref_stats.paint_times)
+ renderer_ref_stats.rasterized_pixel_counts)
+ self.assertEquals(stats.paint_times, renderer_ref_stats.paint_times)
self.assertEquals(stats.painted_pixel_counts,
- ref_stats.painted_pixel_counts)
- self.assertEquals(stats.record_times, ref_stats.record_times)
+ renderer_ref_stats.painted_pixel_counts)
+ self.assertEquals(stats.record_times, renderer_ref_stats.record_times)
self.assertEquals(stats.recorded_pixel_counts,
- ref_stats.recorded_pixel_counts)
+ renderer_ref_stats.recorded_pixel_counts)
def testScrollLatencyFromTimeline(self):
timeline = model.TimelineModel()
diff --git a/tools/perf/metrics/unittest_data/OWNERS b/tools/perf/metrics/unittest_data/OWNERS
new file mode 100644
index 0000000000..b4a4e10f63
--- /dev/null
+++ b/tools/perf/metrics/unittest_data/OWNERS
@@ -0,0 +1,3 @@
+per-file *chrome_proxy*=bengr@chromium.org
+per-file *chrome_proxy*=bolian@chromium.org
+per-file *chrome_proxy*=marq@chromium.org
diff --git a/tools/perf/page_sets/chrome_proxy/OWNERS b/tools/perf/page_sets/chrome_proxy/OWNERS
new file mode 100644
index 0000000000..6a63b539d1
--- /dev/null
+++ b/tools/perf/page_sets/chrome_proxy/OWNERS
@@ -0,0 +1,3 @@
+bengr@chromium.org
+bolian@chromium.org
+marq@chromium.org
diff --git a/tools/perf/page_sets/data/OWNERS b/tools/perf/page_sets/data/OWNERS
new file mode 100644
index 0000000000..b4a4e10f63
--- /dev/null
+++ b/tools/perf/page_sets/data/OWNERS
@@ -0,0 +1,3 @@
+per-file *chrome_proxy*=bengr@chromium.org
+per-file *chrome_proxy*=bolian@chromium.org
+per-file *chrome_proxy*=marq@chromium.org
diff --git a/tools/perf/page_sets/data/polymer.json b/tools/perf/page_sets/data/polymer.json
new file mode 100644
index 0000000000..5cf0feb1b2
--- /dev/null
+++ b/tools/perf/page_sets/data/polymer.json
@@ -0,0 +1,8 @@
+{
+ "description": "Describes the Web Page Replay archives for a page set. Don't edit by hand! Use record_wpr for updating.",
+ "archives": {
+ "polymer_000.wpr": [
+ "http://localhost:8000/components/paper-calculator/demo.html"
+ ]
+ }
+} \ No newline at end of file
diff --git a/tools/perf/page_sets/data/polymer_000.wpr.sha1 b/tools/perf/page_sets/data/polymer_000.wpr.sha1
new file mode 100644
index 0000000000..3e538b9dec
--- /dev/null
+++ b/tools/perf/page_sets/data/polymer_000.wpr.sha1
@@ -0,0 +1 @@
+308553ef02178a571414d551f8a2084438ec49a5 \ No newline at end of file
diff --git a/tools/perf/page_sets/key_silk_cases.py b/tools/perf/page_sets/key_silk_cases.py
index 41e6f6d3d8..4da7b668e5 100644
--- a/tools/perf/page_sets/key_silk_cases.py
+++ b/tools/perf/page_sets/key_silk_cases.py
@@ -374,11 +374,9 @@ class Page18(KeySilkCasesPage):
def ToggleDrawer(self, action_runner):
action_runner.RunAction(TapAction(
{
- 'selector': '#menu-button'
+ 'selector': '#menu-button',
+ 'wait_after' : {'seconds': 1}
}))
- action_runner.BeginInteraction('Wait', [tir_module.IS_SMOOTH])
- action_runner.RunAction(WaitAction({'seconds': 1}))
- action_runner.EndInteraction('Wait', [tir_module.IS_SMOOTH])
class Page19(KeySilkCasesPage):
@@ -418,13 +416,11 @@ class Page19(KeySilkCasesPage):
'element_function': '''
function(callback) {
callback(document.getElementById('nav-drawer').children[0]);
- }'''
+ }''',
+ 'wait_after' : {
+ 'javascript': '!document.getElementById("nav-drawer").active'
+ }
}))
- action_runner.BeginInteraction('Wait', [tir_module.IS_SMOOTH])
- action_runner.RunAction(WaitAction({
- 'javascript': '!document.getElementById("nav-drawer").active'
- }))
- action_runner.EndInteraction('Wait', [tir_module.IS_SMOOTH])
class Page20(KeySilkCasesPage):
@@ -474,11 +470,9 @@ class Page21(KeySilkCasesPage):
'element_function': '''
function(callback) {
callback(document.getElementsByClassName("vk_arc")[0]);
- }'''
+ }''',
+ 'wait_after': {'seconds': 2}
}))
- action_runner.BeginInteraction('Wait', [tir_module.IS_SMOOTH])
- action_runner.RunAction(WaitAction({'seconds' : 2}))
- action_runner.EndInteraction('Wait', [tir_module.IS_SMOOTH])
def RunNavigateSteps(self, action_runner):
@@ -673,4 +667,4 @@ class KeySilkCasesPageSet(page_set_module.PageSet):
self.AddPage(Page23(self))
self.AddPage(Page24(self))
self.AddPage(Page25(self))
- self.AddPage(Page26(self))
+ # self.AddPage(Page26(self)) # crbug.com/366371
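The hunks above fold the explicit Wait interaction blocks into a 'wait_after'
option on the gesture actions themselves. A minimal sketch of the resulting
pattern (names taken from the hunks above; the selector is illustrative):

    action_runner.RunAction(TapAction({
        'selector': '#menu-button',         # element to tap
        'wait_after': {'seconds': 1}        # wait that used to be a separate WaitAction
    }))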
diff --git a/tools/perf/page_sets/mse_cases/startup_test.js b/tools/perf/page_sets/mse_cases/startup_test.js
index 98f714b901..282bde7ce5 100644
--- a/tools/perf/page_sets/mse_cases/startup_test.js
+++ b/tools/perf/page_sets/mse_cases/startup_test.js
@@ -375,12 +375,11 @@
return;
}
+ var testEndTime = getPerfTimestamp();
for (var i = 0; i < appenders.length; ++i) {
appenders[i].onPlaybackStarted(mediaSource);
}
- var testEndTime = getPerfTimestamp();
-
testDone = true;
window.clearInterval(listener);
window.clearTimeout(timeout);
diff --git a/tools/perf/page_sets/pica.py b/tools/perf/page_sets/pica.py
index cc4867a55c..13c2813190 100644
--- a/tools/perf/page_sets/pica.py
+++ b/tools/perf/page_sets/pica.py
@@ -3,30 +3,17 @@
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
-from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
+from measurements import polymer_load
-class PicaPage(page_module.PageWithDefaultRunNavigate):
+class PicaPage(polymer_load.PageForPolymerLoad):
def __init__(self, page_set):
super(PicaPage, self).__init__(
url='http://localhost/polymer/projects/pica/',
page_set=page_set)
self.archive_data_file = 'data/pica.json'
- self.script_to_evaluate_on_commit = '''
- document.addEventListener('polymer-ready', function() {
- var unused = document.body.offsetHeight;
- window.__pica_load_time = performance.now();
- setTimeout(function(){window.__polymer_ready=true}, 1000)
- })'''
-
- def RunNavigateSteps(self, action_runner):
- action_runner.RunAction(NavigateAction())
- action_runner.RunAction(WaitAction(
- {
- 'javascript': 'window.__polymer_ready'
- }))
class PicaPageSet(page_set_module.PageSet):
diff --git a/tools/perf/page_sets/polymer.py b/tools/perf/page_sets/polymer.py
new file mode 100644
index 0000000000..f1a3f97fff
--- /dev/null
+++ b/tools/perf/page_sets/polymer.py
@@ -0,0 +1,84 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# pylint: disable=W0401,W0614
+from telemetry.page.actions.all_page_actions import *
+from telemetry.page import page as page_module
+from telemetry.page import page_set as page_set_module
+
+class PolymerCalculatorPage(page_module.PageWithDefaultRunNavigate):
+
+ def __init__(self, page_set):
+ super(PolymerCalculatorPage, self).__init__(
+ url='http://localhost:8000/components/paper-calculator/demo.html',
+ page_set=page_set)
+ self.user_agent_type = 'mobile'
+ self.archive_data_file = 'data/polymer.json'
+
+ def RunNavigateSteps(self, action_runner):
+ action_runner.RunAction(NavigateAction())
+ action_runner.RunAction(WaitAction(
+ {
+ 'seconds': 2
+ }))
+
+ def RunSmoothness(self, action_runner):
+ self.TapButton(action_runner)
+ self.SlidePanel(action_runner)
+
+ def TapButton(self, action_runner):
+ action_runner.RunAction(TapAction(
+ {
+ 'element_function': '''
+ function(callback) {
+ callback(
+ document.querySelector(
+ 'body /deep/ #outerPanels'
+ ).querySelector(
+ '#standard'
+ ).shadowRoot.querySelector(
+ 'paper-calculator-key[label="5"]'
+ )
+ );
+ }''',
+ 'wait_after': { 'seconds': 2 }
+ }))
+
+ def SlidePanel(self, action_runner):
+ action_runner.RunAction(SwipeAction(
+ {
+ 'left_start_percentage': 0.1,
+ 'distance': 300,
+ 'direction': 'left',
+ 'wait_after': {
+ 'javascript': '''
+ (o = document.querySelector(
+ "body /deep/ #outerPanels"
+ )), o.opened || o.wideMode
+ '''
+ },
+ 'top_start_percentage': 0.2,
+ 'element_function': '''
+ function(callback) {
+ callback(
+ document.querySelector(
+ 'body /deep/ #outerPanels'
+ ).querySelector(
+ '#advanced'
+ ).shadowRoot.querySelector(
+ '.handle-bar'
+ )
+ );
+ }''',
+ 'speed': 5000
+ }))
+
+
+class PolymerPageSet(page_set_module.PageSet):
+
+ def __init__(self):
+ super(PolymerPageSet, self).__init__(
+ user_agent_type='mobile',
+ archive_data_file='data/polymer.json')
+
+ self.AddPage(PolymerCalculatorPage(self))
diff --git a/tools/perf/page_sets/tough_scheduling_cases.py b/tools/perf/page_sets/tough_scheduling_cases.py
index 5f853fe646..86d34715c3 100644
--- a/tools/perf/page_sets/tough_scheduling_cases.py
+++ b/tools/perf/page_sets/tough_scheduling_cases.py
@@ -53,7 +53,7 @@ class Page3(ToughSchedulingCasesPage):
self.synthetic_delays = {
'cc.DrawAndSwap': {'target_duration': 0.004},
- 'gpu.SwapBuffers': {'target_duration': 0.004},
+ 'gpu.PresentingFrame': {'target_duration': 0.004},
'cc.BeginMainFrame': {'target_duration': 0.004}
}
@@ -70,7 +70,7 @@ class Page4(ToughSchedulingCasesPage):
self.synthetic_delays = {
'cc.DrawAndSwap': {'target_duration': 0.012},
- 'gpu.SwapBuffers': {'target_duration': 0.012},
+ 'gpu.PresentingFrame': {'target_duration': 0.012},
'cc.BeginMainFrame': {'target_duration': 0.012}
}
@@ -236,7 +236,7 @@ class Page15(ToughSchedulingCasesPage):
url='file://tough_scheduling_cases/raf.html?gpu_bound',
page_set=page_set)
- self.synthetic_delays = {'gpu.SwapBuffers': {'target_duration': 0.1}}
+ self.synthetic_delays = {'gpu.PresentingFrame': {'target_duration': 0.1}}
class Page16(ToughSchedulingCasesPage):
diff --git a/tools/perf_expectations/perf_expectations.json b/tools/perf_expectations/perf_expectations.json
index a2d3429cf9..700dc84886 100644
--- a/tools/perf_expectations/perf_expectations.json
+++ b/tools/perf_expectations/perf_expectations.json
@@ -365,7 +365,7 @@
"linux-release/sizes/chrome-bss/bss": {"reva": 260243, "revb": 260270, "type": "absolute", "better": "lower", "improve": 324470, "regress": 358626, "sha1": "85f07232"},
"linux-release/sizes/chrome-data/data": {"reva": 234134, "revb": 234142, "type": "absolute", "better": "lower", "improve": 1909598, "regress": 2363084, "sha1": "c3b393d2"},
"linux-release/sizes/chrome-si/initializers": {"reva": 251221, "revb": 251221, "type": "absolute", "better": "lower", "improve": 19, "regress": 19, "tolerance": 0, "sha1": "6800698b"},
- "linux-release/sizes/chrome-text/text": {"reva": 255987, "revb": 256040, "type": "absolute", "better": "lower", "improve": 89942595, "regress": 99411724, "sha1": "a3f12622"},
+ "linux-release/sizes/chrome-text/text": {"reva": 266534, "revb": 266562, "type": "absolute", "better": "lower", "improve": 94446673, "regress": 104400197, "sha1": "1ff99c2f"},
"linux-release/sizes/chrome-textrel/textrel": {"reva": 234134, "revb": 234142, "type": "absolute", "better": "lower", "improve": 0, "regress": 0, "sha1": "61db9eaf"},
"linux-release/sizes/chrome/chrome": {"reva": 255987, "revb": 256040, "type": "absolute", "better": "lower", "improve": 124060944, "regress": 137121148, "sha1": "7e193e84"},
"linux-release/sizes/libffmpegsumo.so-textrel/textrel": {"reva": 200467, "revb": 203456, "type": "absolute", "better": "lower", "improve": 1075, "regress": 1189, "sha1": "a10d4ea4"},
diff --git a/tools/resources/list_resources_removed_by_repack.py b/tools/resources/list_resources_removed_by_repack.py
new file mode 100755
index 0000000000..a009eade41
--- /dev/null
+++ b/tools/resources/list_resources_removed_by_repack.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import os
+import re
+import sys
+
+usage = """%s BUILDTYPE BUILDDIR
+
+BUILDTYPE: either chromium or chrome.
+BUILDDIR: The path to the output directory. e.g. relpath/to/out/Release
+
+Prints out (to stdout) the sorted list of resource ids that are marked as
+unused during the repacking process in the given build log (via stdin).
+Additionally, attempts to print out the name of each resource and the
+generated header file that contains it.
+
+This script is used to print the list of resources that are not used so that
+developers will notice and fix their .grd files.
+"""
+
+
+def GetResourceIdsFromRepackMessage(in_data):
+ """Returns sorted set of resource ids that are not used from in_data.
+ """
+ unused_resources = set()
+ unused_pattern = re.compile(
+ 'RePackFromDataPackStrings Removed Key: (?P<resource_id>[0-9]+)')
+ for line in in_data:
+ match = unused_pattern.match(line)
+ if match:
+ resource_id = int(match.group('resource_id'))
+ unused_resources.add(resource_id)
+ return sorted(unused_resources)
+
+
+def Main():
+ if len(sys.argv) != 3:
+ sys.stderr.write(usage % sys.argv[0])
+ return 1
+
+ build_type = sys.argv[1]
+ build_dir = sys.argv[2]
+
+ if build_type not in ('chromium', 'chrome'):
+ sys.stderr.write(usage % sys.argv[0])
+ return 1
+
+ generated_output_dir = os.path.join(build_dir, 'gen')
+ if not os.path.exists(generated_output_dir):
+ sys.stderr.write('Cannot find gen dir %s' % generated_output_dir)
+ return 1
+
+ if build_type == 'chromium':
+ excluded_header = 'google_chrome_strings.h'
+ else:
+ excluded_header = 'chromium_strings.h'
+ data_files = []
+ for root, dirs, files in os.walk(generated_output_dir):
+ if os.path.basename(root) != 'grit':
+ continue
+
+ header_files = [header for header in files if header.endswith('.h')]
+ if excluded_header in header_files:
+ header_files.remove(excluded_header)
+ data_files.extend([os.path.join(root, header) for header in header_files])
+
+ resource_id_to_name_file_map = {}
+ resource_pattern = re.compile('#define (?P<resource_name>[A-Z0-9_]+).* '
+ '(?P<resource_id>[0-9]+)$')
+ for f in data_files:
+ data = open(f).read()
+ for line in data.splitlines():
+ match = resource_pattern.match(line)
+ if match:
+ resource_id = int(match.group('resource_id'))
+ resource_name = match.group('resource_name')
+ if resource_id in resource_id_to_name_file_map:
+ print 'Duplicate:', resource_id
+ print (resource_name, f)
+ print resource_id_to_name_file_map[resource_id]
+ raise Exception('Duplicate resource id %d' % resource_id)
+ resource_id_to_name_file_map[resource_id] = (resource_name, f)
+
+ unused_resources = GetResourceIdsFromRepackMessage(sys.stdin)
+ for resource_id in unused_resources:
+ if resource_id not in resource_id_to_name_file_map:
+ print 'WARNING: Unknown resource id', resource_id
+ continue
+ (resource_name, filename) = resource_id_to_name_file_map[resource_id]
+ sys.stdout.write('%d: %s in %s\n' % (resource_id, resource_name, filename))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
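Assuming the repack messages come from a ninja build log piped in on stdin, as
the usage string above describes, a hypothetical invocation (paths
illustrative) looks like:

    ninja -C out/Release chrome 2>&1 | \
        tools/resources/list_resources_removed_by_repack.py chromium out/Release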
diff --git a/tools/run-bisect-perf-regression.py b/tools/run-bisect-perf-regression.py
index ba3a6c8b28..3a7ddb01f5 100755
--- a/tools/run-bisect-perf-regression.py
+++ b/tools/run-bisect-perf-regression.py
@@ -15,6 +15,7 @@ directory provided, and run the bisect-perf-regression.py script there.
import imp
import optparse
import os
+import platform
import subprocess
import sys
import traceback
@@ -367,6 +368,9 @@ def _RunBisectionScript(config, working_directory, path_to_file, path_to_goma,
if config['max_time_minutes']:
cmd.extend(['--max_time_minutes', config['max_time_minutes']])
+ if config.has_key('bisect_mode'):
+ cmd.extend(['--bisect_mode', config['bisect_mode']])
+
cmd.extend(['--build_preference', 'ninja'])
if '--browser=cros' in config['command']:
@@ -388,6 +392,13 @@ def _RunBisectionScript(config, working_directory, path_to_file, path_to_goma,
cmd.extend(['--target_platform', 'android'])
if path_to_goma:
+ # crbug.com/330900: the Goma service is not supported on Windows XP.
+ # Moreover, when the gs_bucket flag is set we use archived builds instead
+ # of compiling Chrome, so Goma is skipped on Windows XP.
+ if config.get('gs_bucket') and platform.release() == 'XP':
+ print ('Goma does not have a win32 binary, so it is not supported '
+ 'on Windows XP. Please refer to crbug.com/330900.')
+ path_to_goma = None
cmd.append('--use_goma')
if path_to_extra_src:
diff --git a/tools/telemetry/bootstrap_deps b/tools/telemetry/bootstrap_deps
index a73054069a..39f46ba2f1 100644
--- a/tools/telemetry/bootstrap_deps
+++ b/tools/telemetry/bootstrap_deps
@@ -14,7 +14,7 @@ deps = {
"https://web-page-replay.googlecode.com/svn/trunk",
"src/third_party/trace-viewer":
"https://trace-viewer.googlecode.com/svn/trunk",
- "src/third_party/third_party/android_tools/sdk/platform-tools":
+ "src/third_party/android_tools/sdk/platform-tools":
"https://src.chromium.org/chrome/trunk/src/third_party/android_tools/sdk/platform-tools",
"src/build/android":
"https://src.chromium.org/chrome/trunk/src/build/android",
diff --git a/tools/telemetry/telemetry/core/backends/adb_commands.py b/tools/telemetry/telemetry/core/backends/adb_commands.py
index c02df4630b..d9b64a5ee2 100644
--- a/tools/telemetry/telemetry/core/backends/adb_commands.py
+++ b/tools/telemetry/telemetry/core/backends/adb_commands.py
@@ -25,11 +25,12 @@ try:
from pylib import ports # pylint: disable=F0401
except Exception:
ports = None
+from pylib.device import device_utils
from pylib.utils import apk_helper # pylint: disable=F0401
def IsAndroidSupported():
- return android_commands != None
+ return device_utils != None
def GetAttachedDevices():
@@ -52,21 +53,22 @@ class AdbCommands(object):
"""A thin wrapper around ADB"""
def __init__(self, device):
- self._adb = android_commands.AndroidCommands(device)
- self._device = device
+ self._device = device_utils.DeviceUtils(device)
+ self._device_serial = device
+
+ def device_serial(self):
+ return self._device_serial
def device(self):
return self._device
- def Adb(self):
- return self._adb
-
def __getattr__(self, name):
- """Delegate all unknown calls to the underlying _adb object."""
- return getattr(self._adb, name)
+ """Delegate all unknown calls to the underlying AndroidCommands object."""
+ return getattr(self._device.old_interface, name)
def Forward(self, local, remote):
- ret = self._adb.Adb().SendCommand('forward %s %s' % (local, remote))
+ ret = self._device.old_interface.Adb().SendCommand(
+ 'forward %s %s' % (local, remote))
assert ret == ''
def Install(self, apk_path):
@@ -84,10 +86,11 @@ class AdbCommands(object):
constants.SetBuildType('Debug')
apk_package_name = apk_helper.GetPackageName(apk_path)
- return self._adb.ManagedInstall(apk_path, package_name=apk_package_name)
+ return self._device.old_interface.ManagedInstall(
+ apk_path, package_name=apk_package_name)
def IsUserBuild(self):
- return self._adb.GetBuildType() == 'user'
+ return self._device.old_interface.GetBuildType() == 'user'
def GetBuildTypeOfPath(path):
diff --git a/tools/telemetry/telemetry/core/backends/chrome/android_browser_backend.py b/tools/telemetry/telemetry/core/backends/chrome/android_browser_backend.py
index 56a79d3f61..9eead01088 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/android_browser_backend.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/android_browser_backend.py
@@ -86,10 +86,11 @@ class ChromeBackendSettings(AndroidBrowserBackendSettings):
profile_base = os.path.basename(profile_parent)
saved_profile_location = '/sdcard/profile/%s' % profile_base
- self.adb.Adb().PushIfNeeded(new_profile_dir, saved_profile_location)
+ self.adb.device().old_interface.PushIfNeeded(
+ new_profile_dir, saved_profile_location)
- self.adb.Adb().EfficientDeviceDirectoryCopy(saved_profile_location,
- self.profile_dir)
+ self.adb.device().old_interface.EfficientDeviceDirectoryCopy(
+ saved_profile_location, self.profile_dir)
dumpsys = self.adb.RunShellCommand('dumpsys package %s' % self.package)
id_line = next(line for line in dumpsys if 'userId=' in line)
uid = re.search('\d+', id_line).group()
@@ -194,7 +195,7 @@ class AndroidBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
# Kill old browser.
self._adb.CloseApplication(self._backend_settings.package)
- if self._adb.Adb().CanAccessProtectedFileContents():
+ if self._adb.device().old_interface.CanAccessProtectedFileContents():
if self.browser_options.profile_dir:
self._backend_settings.PushProfile(self.browser_options.profile_dir)
elif not self.browser_options.dont_override_profile:
@@ -240,8 +241,8 @@ class AndroidBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
def _SetCommandLineFile(self, file_contents):
logging.debug('Using command line: ' + file_contents)
def IsProtectedFile(name):
- if self._adb.Adb().FileExistsOnDevice(name):
- return not self._adb.Adb().IsFileWritableOnDevice(name)
+ if self._adb.device().old_interface.FileExistsOnDevice(name):
+ return not self._adb.device().old_interface.IsFileWritableOnDevice(name)
else:
parent_name = os.path.dirname(name)
if parent_name != '':
@@ -250,19 +251,23 @@ class AndroidBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
return True
if IsProtectedFile(self._backend_settings.cmdline_file):
- if not self._adb.Adb().CanAccessProtectedFileContents():
+ if not self._adb.device().old_interface.CanAccessProtectedFileContents():
logging.critical('Cannot set Chrome command line. '
'Fix this by flashing to a userdebug build.')
sys.exit(1)
- self._saved_cmdline = ''.join(self._adb.Adb().GetProtectedFileContents(
- self._backend_settings.cmdline_file) or [])
- self._adb.Adb().SetProtectedFileContents(
+ self._saved_cmdline = ''.join(
+ self._adb.device().old_interface.GetProtectedFileContents(
+ self._backend_settings.cmdline_file)
+ or [])
+ self._adb.device().old_interface.SetProtectedFileContents(
self._backend_settings.cmdline_file, file_contents)
else:
- self._saved_cmdline = ''.join(self._adb.Adb().GetFileContents(
- self._backend_settings.cmdline_file) or [])
- self._adb.Adb().SetFileContents(self._backend_settings.cmdline_file,
- file_contents)
+ self._saved_cmdline = ''.join(
+ self._adb.device().old_interface.GetFileContents(
+ self._backend_settings.cmdline_file)
+ or [])
+ self._adb.device().old_interface.SetFileContents(
+ self._backend_settings.cmdline_file, file_contents)
def Start(self):
self._SetUpCommandLine()
@@ -275,7 +280,7 @@ class AndroidBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
# If we have no existing tabs start with a blank page since default
# startup with the NTP can lead to race conditions with Telemetry
url = 'about:blank'
- self._adb.Adb().DismissCrashDialogIfNeeded()
+ self._adb.device().old_interface.DismissCrashDialogIfNeeded()
self._adb.StartActivity(self._backend_settings.package,
self._backend_settings.activity,
True,
@@ -291,7 +296,7 @@ class AndroidBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
self._PostBrowserStartupInitialization()
except exceptions.BrowserGoneException:
logging.critical('Failed to connect to browser.')
- if not self._adb.Adb().CanAccessProtectedFileContents():
+ if not self._adb.device().old_interface.CanAccessProtectedFileContents():
logging.critical(
'Resolve this by either: '
'(1) Flashing to a userdebug build OR '
@@ -383,7 +388,8 @@ class AndroidBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
# is fixed in android's adb_interface at 60 seconds, which may
# be too short to pull the cache.
cmd = 'pull %s %s' % (source, dest)
- self._adb.Adb().Adb().SendCommand(cmd, timeout_time=240)
+ self._adb.device().old_interface.Adb().SendCommand(
+ cmd, timeout_time=240)
def IsBrowserRunning(self):
pids = self._adb.ExtractPid(self._backend_settings.package)
@@ -415,7 +421,7 @@ class AndroidBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
if os.path.exists(tombstones):
ret += Decorate('Tombstones',
subprocess.Popen([tombstones, '-w', '--device',
- self._adb.device()],
+ self._adb.device_serial()],
stdout=subprocess.PIPE).communicate()[0])
return ret
diff --git a/tools/telemetry/telemetry/core/backends/chrome/android_browser_finder.py b/tools/telemetry/telemetry/core/backends/chrome/android_browser_finder.py
index 6adca50829..846e47d8d6 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/android_browser_finder.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/android_browser_finder.py
@@ -96,7 +96,7 @@ class PossibleAndroidBrowser(possible_browser.PossibleBrowser):
@decorators.Cache
def _platform_backend(self):
return android_platform_backend.AndroidPlatformBackend(
- self._backend_settings.adb.Adb(),
+ self._backend_settings.adb.device(),
self.finder_options.no_performance_mode)
def Create(self):
@@ -118,6 +118,7 @@ class PossibleAndroidBrowser(possible_browser.PossibleBrowser):
def HaveLocalAPK(self):
return self._local_apk and os.path.exists(self._local_apk)
+ @decorators.Cache
def UpdateExecutableIfNeeded(self):
if self.HaveLocalAPK():
real_logging.warn(
diff --git a/tools/telemetry/telemetry/core/backends/chrome/android_browser_finder_unittest.py b/tools/telemetry/telemetry/core/backends/chrome/android_browser_finder_unittest.py
index 3f55666890..a33266e8a6 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/android_browser_finder_unittest.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/android_browser_finder_unittest.py
@@ -1,6 +1,7 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
import unittest
from telemetry import test
@@ -8,6 +9,7 @@ from telemetry.core import browser_options
from telemetry.core.backends.chrome import android_browser_finder
from telemetry.unittest import system_stub
+
class LoggingStub(object):
def __init__(self):
self.warnings = []
@@ -18,10 +20,11 @@ class LoggingStub(object):
def warn(self, msg, *args):
self.warnings.append(msg % args)
+
class AndroidBrowserFinderTest(unittest.TestCase):
def setUp(self):
self._stubs = system_stub.Override(android_browser_finder,
- ['adb_commands', 'subprocess'])
+ ['adb_commands', 'os', 'subprocess'])
android_browser_finder.adb_works = None # Blow cache between runs.
def tearDown(self):
@@ -42,7 +45,6 @@ class AndroidBrowserFinderTest(unittest.TestCase):
browsers = android_browser_finder.FindAllAvailableBrowsers(finder_options)
self.assertEquals(0, len(browsers))
-
def test_adb_permissions_error(self):
finder_options = browser_options.BrowserFinderOptions()
@@ -59,7 +61,6 @@ class AndroidBrowserFinderTest(unittest.TestCase):
self.assertEquals(3, len(log_stub.warnings))
self.assertEquals(0, len(browsers))
-
def test_adb_two_devices(self):
finder_options = browser_options.BrowserFinderOptions()
diff --git a/tools/telemetry/telemetry/core/backends/chrome/chrome_browser_backend.py b/tools/telemetry/telemetry/core/backends/chrome/chrome_browser_backend.py
index 5b3ef23da5..2f467f1987 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/chrome_browser_backend.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/chrome_browser_backend.py
@@ -21,7 +21,6 @@ from telemetry.core import wpr_modes
from telemetry.core import wpr_server
from telemetry.core.backends import browser_backend
from telemetry.core.backends.chrome import extension_backend
-from telemetry.core.backends.chrome import misc_web_contents_backend
from telemetry.core.backends.chrome import system_info_backend
from telemetry.core.backends.chrome import tab_list_backend
from telemetry.core.backends.chrome import tracing_backend
@@ -76,12 +75,6 @@ class ChromeBrowserBackend(browser_backend.BrowserBackend):
@property
@decorators.Cache
- def misc_web_contents_backend(self):
- """Access to chrome://oobe/login page."""
- return misc_web_contents_backend.MiscWebContentsBackend(self)
-
- @property
- @decorators.Cache
def extension_backend(self):
if not self.supports_extensions:
return None
diff --git a/tools/telemetry/telemetry/core/backends/chrome/chrome_browser_options.py b/tools/telemetry/telemetry/core/backends/chrome/chrome_browser_options.py
index 3c266033b7..d2533a0402 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/chrome_browser_options.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/chrome_browser_options.py
@@ -32,6 +32,9 @@ class CrosBrowserOptions(ChromeBrowserOptions):
super(CrosBrowserOptions, self).__init__(br_options)
# Create a browser with oobe property.
self.create_browser_with_oobe = False
+ # Clear enterprise policy before logging in.
+ self.clear_enterprise_policy = False
+
self.auto_login = True
self.gaia_login = False
self.username = 'test@test.test'
diff --git a/tools/telemetry/telemetry/core/backends/chrome/cros_browser_backend.py b/tools/telemetry/telemetry/core/backends/chrome/cros_browser_backend.py
index e31a439789..8b6449434a 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/cros_browser_backend.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/cros_browser_backend.py
@@ -11,6 +11,7 @@ from telemetry.core import exceptions
from telemetry.core import forwarders
from telemetry.core import util
from telemetry.core.backends.chrome import chrome_browser_backend
+from telemetry.core.backends.chrome import misc_web_contents_backend
from telemetry.core.forwarders import cros_forwarder
@@ -47,8 +48,6 @@ class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
self._remote_debugging_port = self._cri.GetRemotePort()
self._port = self._remote_debugging_port
- self._SetBranchNumber(self._GetChromeVersion())
-
# Copy extensions to temp directories on the device.
# Note that we also perform this copy locally to ensure that
# the owner of the extensions is set to chronos.
@@ -59,9 +58,11 @@ class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
cri.Chown(extension_dir)
e.local_path = os.path.join(extension_dir, os.path.basename(e.path))
- # Ensure the UI is running and logged out.
- self._RestartUI()
- util.WaitFor(self.IsBrowserRunning, 20)
+ self._cri.RunCmdOnDevice(['stop', 'ui'])
+
+ if self.browser_options.clear_enterprise_policy:
+ self._cri.RmRF('/var/lib/whitelist/*')
+ self._cri.RmRF('/home/chronos/Local\ State')
# Delete test user's cryptohome vault (user data directory).
if not self.browser_options.dont_override_profile:
@@ -73,6 +74,11 @@ class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
self.profile_directory)
cri.Chown(self.profile_directory)
+ self._cri.RunCmdOnDevice(['start', 'ui'])
+ util.WaitFor(self.IsBrowserRunning, 20)
+
+ self._SetBranchNumber(self._GetChromeVersion())
+
def GetBrowserStartupArgs(self):
args = super(CrOSBrowserBackend, self).GetBrowserStartupArgs()
args.extend([
@@ -198,15 +204,17 @@ class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
if self.browser_options.auto_login:
if self._is_guest:
pid = self.pid
- self._NavigateGuestLogin()
+ self.oobe.NavigateGuestLogin()
# Guest browsing shuts down the current browser and launches an
# incognito browser in a separate process, which we need to wait for.
util.WaitFor(lambda: pid != self.pid, 10)
self._WaitForBrowserToComeUp()
elif self.browser_options.gaia_login:
- self._NavigateGaiaLogin()
+ self.oobe.NavigateGaiaLogin(self.browser_options.username,
+ self.browser_options.password)
else:
- self._NavigateFakeLogin()
+ self.oobe.NavigateFakeLogin(self.browser_options.username,
+ self.browser_options.password)
logging.info('Browser is up!')
@@ -249,6 +257,15 @@ class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
else:
self._cri.RunCmdOnDevice(['start', 'ui'])
+ def TakeScreenShot(self, screenshot_prefix):
+ self._cri.TakeScreenShot(screenshot_prefix)
+
+ @property
+ @decorators.Cache
+ def misc_web_contents_backend(self):
+ """Access to chrome://oobe/login page."""
+ return misc_web_contents_backend.MiscWebContentsBackend(self)
+
@property
def oobe(self):
return self.misc_web_contents_backend.GetOobe()
@@ -257,19 +274,6 @@ class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
def oobe_exists(self):
return self.misc_web_contents_backend.oobe_exists
- def _SigninUIState(self):
- """Returns the signin ui state of the oobe. HIDDEN: 0, GAIA_SIGNIN: 1,
- ACCOUNT_PICKER: 2, WRONG_HWID_WARNING: 3, MANAGED_USER_CREATION_FLOW: 4.
- These values are in
- chrome/browser/resources/chromeos/login/display_manager.js
- """
- return self.oobe.EvaluateJavaScript('''
- loginHeader = document.getElementById('login-header-bar')
- if (loginHeader) {
- loginHeader.signinUIState_;
- }
- ''')
-
def _IsCryptohomeMounted(self):
username = '$guest' if self._is_guest else self.browser_options.username
return self._cri.IsCryptohomeMounted(username, self._is_guest)
@@ -281,100 +285,15 @@ class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
self.HasBrowserFinishedLaunching() and
not self.oobe_exists)
- def _WaitForSigninScreen(self):
- """Waits for oobe to be on the signin or account picker screen."""
- def OnAccountPickerScreen():
- signin_state = self._SigninUIState()
- # GAIA_SIGNIN or ACCOUNT_PICKER screens.
- return signin_state == 1 or signin_state == 2
- try:
- util.WaitFor(OnAccountPickerScreen, 60)
- except util.TimeoutException:
- self._cri.TakeScreenShot('guest-screen')
- raise exceptions.LoginException('Timed out waiting for signin screen, '
- 'signin state %d' % self._SigninUIState())
-
- def _ClickBrowseAsGuest(self):
- """Click the Browse As Guest button on the account picker screen. This will
- restart the browser, and we could have a tab crash or a browser crash."""
- try:
- self.oobe.EvaluateJavaScript("""
- var guest = document.getElementById("guest-user-button");
- if (guest) {
- guest.click();
- }
- """)
- except (exceptions.TabCrashException,
- exceptions.BrowserConnectionGoneException):
- pass
-
- def _GaiaLoginContext(self):
- oobe = self.oobe
- # TODO(achuith): Implement an api in the oobe instead of calling
- # chrome.send.
- oobe.ExecuteJavaScript("chrome.send('addUser');")
- for gaia_context in range(15):
- try:
- if oobe.EvaluateJavaScriptInContext(
- "document.getElementById('Email') != null", gaia_context):
- return gaia_context
- except exceptions.EvaluateException:
- pass
- return None
+ def WaitForLogin(self):
+ if self._is_guest:
+ util.WaitFor(self._IsCryptohomeMounted, 30)
+ return
- def _NavigateGuestLogin(self):
- """Navigates through oobe login screen as guest."""
- logging.info('Logging in as guest')
- oobe = self.oobe
- util.WaitFor(lambda: oobe.EvaluateJavaScript(
- 'typeof Oobe !== \'undefined\''), 10)
-
- if oobe.EvaluateJavaScript(
- "typeof Oobe.guestLoginForTesting != 'undefined'"):
- oobe.ExecuteJavaScript('Oobe.guestLoginForTesting();')
- else:
- self._WaitForSigninScreen()
- self._ClickBrowseAsGuest()
-
- util.WaitFor(self._IsCryptohomeMounted, 30)
-
- def _NavigateFakeLogin(self):
- """Logs in using Oobe.loginForTesting."""
- logging.info('Invoking Oobe.loginForTesting')
- oobe = self.oobe
- util.WaitFor(lambda: oobe.EvaluateJavaScript(
- 'typeof Oobe !== \'undefined\''), 10)
-
- if oobe.EvaluateJavaScript(
- 'typeof Oobe.loginForTesting == \'undefined\''):
- raise exceptions.LoginException('Oobe.loginForTesting js api missing')
-
- oobe.ExecuteJavaScript(
- 'Oobe.loginForTesting(\'%s\', \'%s\');'
- % (self.browser_options.username, self.browser_options.password))
- self._WaitForLogin()
-
- def _NavigateGaiaLogin(self):
- """Logs into the GAIA service with provided credentials."""
- # TODO(achuith): Fake gaia service with a python server.
- self._WaitForSigninScreen()
- gaia_context = util.WaitFor(self._GaiaLoginContext, timeout=10)
- oobe = self.oobe
- oobe.ExecuteJavaScriptInContext(
- "document.getElementById('Email').value='%s';"
- % self.browser_options.username, gaia_context)
- oobe.ExecuteJavaScriptInContext(
- "document.getElementById('Passwd').value='%s';"
- % self.browser_options.password, gaia_context)
- oobe.ExecuteJavaScriptInContext(
- "document.getElementById('signIn').click();", gaia_context)
- self._WaitForLogin()
-
- def _WaitForLogin(self):
try:
util.WaitFor(self._IsLoggedIn, 60)
except util.TimeoutException:
- self._cri.TakeScreenShot('login-screen')
+ self.TakeScreenShot('login-screen')
raise exceptions.LoginException('Timed out going through login screen')
# Wait for extensions to load.
@@ -382,7 +301,7 @@ class CrOSBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
self._WaitForBrowserToComeUp()
except util.TimeoutException:
logging.error('Chrome args: %s' % self._GetChromeProcess()['args'])
- self._cri.TakeScreenShot('extension-timeout')
+ self.TakeScreenShot('extension-timeout')
raise
# Workaround for crbug.com/329271, crbug.com/334726.
diff --git a/tools/telemetry/telemetry/core/backends/chrome/cros_test_case.py b/tools/telemetry/telemetry/core/backends/chrome/cros_test_case.py
new file mode 100644
index 0000000000..a12d76826c
--- /dev/null
+++ b/tools/telemetry/telemetry/core/backends/chrome/cros_test_case.py
@@ -0,0 +1,76 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+from telemetry.core import browser_finder
+from telemetry.core import extension_to_load
+from telemetry.core import util
+from telemetry.core.backends.chrome import cros_interface
+from telemetry.unittest import options_for_unittests
+
+
+class CrOSTestCase(unittest.TestCase):
+ def setUp(self):
+ options = options_for_unittests.GetCopy()
+ self._cri = cros_interface.CrOSInterface(options.cros_remote,
+ options.cros_ssh_identity)
+ self._is_guest = options.browser_type == 'cros-chrome-guest'
+ self._username = options.browser_options.username
+ self._password = options.browser_options.password
+ self._load_extension = None
+
+ def _CreateBrowser(self, autotest_ext=False, auto_login=True,
+ gaia_login=False, username=None, password=None):
+ """Finds and creates a browser for tests. if autotest_ext is True,
+ also loads the autotest extension"""
+ options = options_for_unittests.GetCopy()
+
+ if autotest_ext:
+ extension_path = os.path.join(util.GetUnittestDataDir(), 'autotest_ext')
+ assert os.path.isdir(extension_path)
+ self._load_extension = extension_to_load.ExtensionToLoad(
+ path=extension_path,
+ browser_type=options.browser_type,
+ is_component=True)
+ options.extensions_to_load = [self._load_extension]
+
+ browser_to_create = browser_finder.FindBrowser(options)
+ self.assertTrue(browser_to_create)
+ options.browser_options.create_browser_with_oobe = True
+ options.browser_options.auto_login = auto_login
+ options.browser_options.gaia_login = gaia_login
+ if username is not None:
+ options.browser_options.username = username
+ if password is not None:
+ options.browser_options.password = password
+
+ return browser_to_create.Create()
+
+ def _GetAutotestExtension(self, browser):
+ """Returns the autotest extension instance"""
+ extension = browser.extensions[self._load_extension]
+ self.assertTrue(extension)
+ return extension
+
+ def _IsCryptohomeMounted(self):
+ """Returns True if cryptohome is mounted. as determined by the cmd
+ cryptohome --action=is_mounted"""
+ return self._cri.RunCmdOnDevice(
+ ['/usr/sbin/cryptohome', '--action=is_mounted'])[0].strip() == 'true'
+
+ def _GetLoginStatus(self, browser):
+ extension = self._GetAutotestExtension(browser)
+ self.assertTrue(extension.EvaluateJavaScript(
+ "typeof('chrome.autotestPrivate') != 'undefined'"))
+ extension.ExecuteJavaScript('''
+ window.__login_status = null;
+ chrome.autotestPrivate.loginStatus(function(s) {
+ window.__login_status = s;
+ });
+ ''')
+ return util.WaitFor(
+ lambda: extension.EvaluateJavaScript('window.__login_status'), 10)
+
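A minimal sketch of a test built on the shared base class above (the login
status keys are the ones exercised in cros_unittest.py below; the test body is
illustrative only):

    class ExampleLoginTest(cros_test_case.CrOSTestCase):
      @test.Enabled('chromeos')
      def testLoginSketch(self):
        with self._CreateBrowser(autotest_ext=True) as browser:
          status = self._GetLoginStatus(browser)
          self.assertEquals(status['email'], self._username)
          self.assertFalse(status['isScreenLocked'])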
diff --git a/tools/telemetry/telemetry/core/backends/chrome/cros_unittest.py b/tools/telemetry/telemetry/core/backends/chrome/cros_unittest.py
index 6c0d2665f4..2d5277ca92 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/cros_unittest.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/cros_unittest.py
@@ -3,59 +3,14 @@
# found in the LICENSE file.
import logging
-import os
-import unittest
from telemetry import test
-from telemetry.core import browser_finder
from telemetry.core import exceptions
-from telemetry.core import extension_to_load
from telemetry.core import util
-from telemetry.core.backends.chrome import cros_interface
-from telemetry.unittest import options_for_unittests
-
-class CrOSTest(unittest.TestCase):
- def setUp(self):
- options = options_for_unittests.GetCopy()
- self._cri = cros_interface.CrOSInterface(options.cros_remote,
- options.cros_ssh_identity)
- self._is_guest = options.browser_type == 'cros-chrome-guest'
- self._username = options.browser_options.username
- self._password = options.browser_options.password
- self._load_extension = None
-
- def _CreateBrowser(self, autotest_ext=False, auto_login=True):
- """Finds and creates a browser for tests. if autotest_ext is True,
- also loads the autotest extension"""
- options = options_for_unittests.GetCopy()
-
- if autotest_ext:
- extension_path = os.path.join(util.GetUnittestDataDir(), 'autotest_ext')
- assert os.path.isdir(extension_path)
- self._load_extension = extension_to_load.ExtensionToLoad(
- path=extension_path,
- browser_type=options.browser_type,
- is_component=True)
- options.extensions_to_load = [self._load_extension]
-
- browser_to_create = browser_finder.FindBrowser(options)
- self.assertTrue(browser_to_create)
- options.browser_options.create_browser_with_oobe = True
- options.browser_options.auto_login = auto_login
- return browser_to_create.Create()
-
- def _GetAutotestExtension(self, browser):
- """Returns the autotest extension instance"""
- extension = browser.extensions[self._load_extension]
- self.assertTrue(extension)
- return extension
-
- def _IsCryptohomeMounted(self):
- """Returns True if cryptohome is mounted. as determined by the cmd
- cryptohome --action=is_mounted"""
- return self._cri.RunCmdOnDevice(
- ['/usr/sbin/cryptohome', '--action=is_mounted'])[0].strip() == 'true'
+from telemetry.core.backends.chrome import cros_test_case
+
+class CrOSCryptohomeTest(cros_test_case.CrOSTestCase):
@test.Enabled('chromeos')
def testCryptohome(self):
"""Verifies cryptohome mount status for regular and guest user and when
@@ -79,19 +34,8 @@ class CrOSTest(unittest.TestCase):
self.assertEquals(self._cri.FilesystemMountedAt('/home/chronos/user'),
'/dev/mapper/encstateful')
- def _GetLoginStatus(self, browser):
- extension = self._GetAutotestExtension(browser)
- self.assertTrue(extension.EvaluateJavaScript(
- "typeof('chrome.autotestPrivate') != 'undefined'"))
- extension.ExecuteJavaScript('''
- window.__login_status = null;
- chrome.autotestPrivate.loginStatus(function(s) {
- window.__login_status = s;
- });
- ''')
- return util.WaitFor(
- lambda: extension.EvaluateJavaScript('window.__login_status'), 10)
+class CrOSLoginTest(cros_test_case.CrOSTestCase):
@test.Enabled('chromeos')
def testLoginStatus(self):
"""Tests autotestPrivate.loginStatus"""
@@ -106,6 +50,22 @@ class CrOSTest(unittest.TestCase):
self.assertEquals(login_status['email'], self._username)
self.assertFalse(login_status['isScreenLocked'])
+ @test.Enabled('chromeos')
+ def testLogout(self):
+ """Tests autotestPrivate.logout"""
+ if self._is_guest:
+ return
+ with self._CreateBrowser(autotest_ext=True) as b:
+ extension = self._GetAutotestExtension(b)
+ try:
+ extension.ExecuteJavaScript('chrome.autotestPrivate.logout();')
+ except (exceptions.BrowserConnectionGoneException,
+ exceptions.BrowserGoneException):
+ pass
+ util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20)
+
+
+class CrOSScreenLockerTest(cros_test_case.CrOSTestCase):
def _IsScreenLocked(self, browser):
return self._GetLoginStatus(browser)['isScreenLocked']
@@ -157,17 +117,3 @@ class CrOSTest(unittest.TestCase):
self._LockScreen(browser)
self._AttemptUnlockBadPassword(browser)
self._UnlockScreen(browser)
-
- @test.Enabled('chromeos')
- def testLogout(self):
- """Tests autotestPrivate.logout"""
- if self._is_guest:
- return
- with self._CreateBrowser(autotest_ext=True) as b:
- extension = self._GetAutotestExtension(b)
- try:
- extension.ExecuteJavaScript('chrome.autotestPrivate.logout();')
- except (exceptions.BrowserConnectionGoneException,
- exceptions.BrowserGoneException):
- pass
- util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20)
diff --git a/tools/telemetry/telemetry/core/backends/chrome/desktop_browser_backend.py b/tools/telemetry/telemetry/core/backends/chrome/desktop_browser_backend.py
index 3ae5b60077..e192e81d82 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/desktop_browser_backend.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/desktop_browser_backend.py
@@ -2,12 +2,13 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import distutils
import glob
import heapq
import logging
import os
-import subprocess as subprocess
import shutil
+import subprocess as subprocess
import sys
import tempfile
import time
@@ -57,6 +58,7 @@ class DesktopBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
self._port = None
self._profile_dir = None
self._tmp_minidump_dir = tempfile.mkdtemp()
+ self._crash_service = None
self._SetupProfile()
@@ -78,6 +80,44 @@ class DesktopBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
shutil.rmtree(self._tmp_profile_dir)
shutil.copytree(profile_dir, self._tmp_profile_dir)
+ def _GetCrashServicePipeName(self):
+ # Ensure a unique pipe name by using the name of the temp dir.
+ return r'\\.\pipe\%s_service' % os.path.basename(self._tmp_minidump_dir)
+
+ def _StartCrashService(self):
+ os_name = self._browser.platform.GetOSName()
+ if os_name != 'win':
+ return None
+ return subprocess.Popen([
+ support_binaries.FindPath('crash_service', os_name),
+ '--no-window',
+ '--dumps-dir=%s' % self._tmp_minidump_dir,
+ '--pipe-name=%s' % self._GetCrashServicePipeName()])
+
+ def _GetCdbPath(self):
+ search_paths = [os.getenv('PROGRAMFILES(X86)', ''),
+ os.getenv('PROGRAMFILES', ''),
+ os.getenv('LOCALAPPDATA', ''),
+ os.getenv('PATH', '')]
+ possible_paths = [
+ 'Debugging Tools For Windows',
+ 'Debugging Tools For Windows (x86)',
+ 'Debugging Tools For Windows (x64)',
+ os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'),
+ os.path.join('Windows Kits', '8.0', 'Debuggers', 'x64'),
+ os.path.join('win_toolchain', 'vs2013_files', 'win8sdk', 'Debuggers',
+ 'x86'),
+ os.path.join('win_toolchain', 'vs2013_files', 'win8sdk', 'Debuggers',
+ 'x64'),
+ ]
+ for possible_path in possible_paths:
+ path = distutils.spawn.find_executable(
+ os.path.join(possible_path, 'cdb'),
+ path=os.pathsep.join(search_paths))
+ if path:
+ return path
+ return None
+
def HasBrowserFinishedLaunching(self):
# In addition to the functional check performed by the base class, quickly
# check if the browser process is still alive.
@@ -120,6 +160,8 @@ class DesktopBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
env = os.environ.copy()
env['CHROME_HEADLESS'] = '1' # Don't upload minidumps.
env['BREAKPAD_DUMP_LOCATION'] = self._tmp_minidump_dir
+ env['CHROME_BREAKPAD_PIPE_NAME'] = self._GetCrashServicePipeName()
+ self._crash_service = self._StartCrashService()
logging.debug('Starting Chrome %s', args)
if not self.browser_options.show_stdout:
self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0)
@@ -167,29 +209,40 @@ class DesktopBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
except IOError:
return ''
- def GetStackTrace(self):
- stackwalk = support_binaries.FindPath('minidump_stackwalk',
- self._browser.platform.GetOSName())
- if not stackwalk:
- logging.warning('minidump_stackwalk binary not found. Must build it to '
- 'symbolize crash dumps. Returning browser stdout.')
- return self.GetStandardOutput()
-
+ def _GetMostRecentMinidump(self):
dumps = glob.glob(os.path.join(self._tmp_minidump_dir, '*.dmp'))
if not dumps:
- logging.warning('No crash dump found. Returning browser stdout.')
- return self.GetStandardOutput()
+ return None
most_recent_dump = heapq.nlargest(1, dumps, os.path.getmtime)[0]
if os.path.getmtime(most_recent_dump) < (time.time() - (5 * 60)):
logging.warning('Crash dump is older than 5 minutes. May not be correct.')
+ return most_recent_dump
+
+ def _GetStackFromMinidump(self, minidump):
+ os_name = self._browser.platform.GetOSName()
+ if os_name == 'win':
+ cdb = self._GetCdbPath()
+ if not cdb:
+ logging.warning('cdb.exe not found.')
+ return None
+ output = subprocess.check_output([cdb, '-y', self._browser_directory,
+ '-c', '.ecxr;k30;q', '-z', minidump])
+ stack_start = output.find('ChildEBP')
+ stack_end = output.find('quit:')
+ return output[stack_start:stack_end]
+
+ stackwalk = support_binaries.FindPath('minidump_stackwalk', os_name)
+ if not stackwalk:
+ logging.warning('minidump_stackwalk binary not found.')
+ return None
symbols = glob.glob(os.path.join(self._browser_directory, '*.breakpad*'))
if not symbols:
- logging.warning('No breakpad symbols found. Returning browser stdout.')
- return self.GetStandardOutput()
+ logging.warning('No breakpad symbols found.')
+ return None
- minidump = most_recent_dump + '.stripped'
- with open(most_recent_dump, 'rb') as infile:
+ with open(minidump, 'rb') as infile:
+ minidump += '.stripped'
with open(minidump, 'wb') as outfile:
outfile.write(''.join(infile.read().partition('MDMP')[1:]))
@@ -209,10 +262,21 @@ class DesktopBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
os.makedirs(symbol_path)
shutil.copyfile(symbol, os.path.join(symbol_path, binary + '.sym'))
- error = tempfile.NamedTemporaryFile('w', 0)
- return subprocess.Popen(
- [stackwalk, minidump, symbols_path],
- stdout=subprocess.PIPE, stderr=error).communicate()[0]
+ return subprocess.check_output([stackwalk, minidump, symbols_path],
+ stderr=open(os.devnull, 'w'))
+
+ def GetStackTrace(self):
+ most_recent_dump = self._GetMostRecentMinidump()
+ if not most_recent_dump:
+ logging.warning('No crash dump found. Returning browser stdout.')
+ return self.GetStandardOutput()
+
+ stack = self._GetStackFromMinidump(most_recent_dump)
+ if not stack:
+ logging.warning('Failed to symbolize minidump. Returning browser stdout.')
+ return self.GetStandardOutput()
+
+ return stack
def __del__(self):
self.Close()
@@ -240,6 +304,10 @@ class DesktopBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
finally:
self._proc = None
+ if self._crash_service:
+ self._crash_service.kill()
+ self._crash_service = None
+
if self._output_profile_path:
# If we need the output then double check that it exists.
if not (self._tmp_profile_dir and os.path.exists(self._tmp_profile_dir)):
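On Windows, the new _GetStackFromMinidump path shells out to cdb; the argument
list assembled above is equivalent to running (paths illustrative):

    cdb -y <browser_directory> -c ".ecxr;k30;q" -z <minidump.dmp>

i.e. switch to the exception context, dump 30 stack frames, and quit; the
output is then trimmed between the 'ChildEBP' and 'quit:' markers.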
diff --git a/tools/telemetry/telemetry/core/backends/chrome/inspector_backend.py b/tools/telemetry/telemetry/core/backends/chrome/inspector_backend.py
index dd627a3b69..5e242caf6e 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/inspector_backend.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/inspector_backend.py
@@ -73,7 +73,10 @@ class InspectorBackend(inspector_websocket.InspectorWebsocket):
@property
def url(self):
- return self._context['url']
+ for c in self._browser_backend.ListInspectableContexts():
+ if c['id'] == self.id:
+ return c['url']
+ return None
@property
def id(self):
diff --git a/tools/telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py b/tools/telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py
index 41e0541619..e33328cd4a 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py
@@ -2,11 +2,18 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import logging
+
from telemetry.unittest import tab_test_case
class InspectorMemoryTest(tab_test_case.TabTestCase):
+
def testGetDOMStats(self):
+ if not self._browser.supports_tab_control:
+ logging.warning('Browser does not support tab control, skipping test.')
+ return
+
# Due to an issue with CrOS, we create a new tab here rather than
# using the existing tab to get a consistent starting page on all platforms.
self._tab = self._browser.tabs.New()
diff --git a/tools/telemetry/telemetry/core/backends/chrome/inspector_runtime_unittest.py b/tools/telemetry/telemetry/core/backends/chrome/inspector_runtime_unittest.py
index 97fbf7dd1b..f3d8814be0 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/inspector_runtime_unittest.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/inspector_runtime_unittest.py
@@ -1,10 +1,13 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
+from telemetry import decorators
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.unittest import tab_test_case
+
class InspectorRuntimeTest(tab_test_case.TabTestCase):
def testRuntimeEvaluateSimple(self):
res = self._tab.EvaluateJavaScript('1+1')
@@ -31,6 +34,7 @@ class InspectorRuntimeTest(tab_test_case.TabTestCase):
def testRuntimeExecuteOfSomethingThatCantJSONize(self):
self._tab.ExecuteJavaScript('window')
+ @decorators.Disabled('android')
def testIFrame(self):
self.Navigate('host.html')
diff --git a/tools/telemetry/telemetry/core/backends/chrome/misc_web_contents_backend.py b/tools/telemetry/telemetry/core/backends/chrome/misc_web_contents_backend.py
index 7eb08568f9..592a96bcb4 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/misc_web_contents_backend.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/misc_web_contents_backend.py
@@ -2,9 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from telemetry.core import web_contents
from telemetry.core.backends.chrome import inspector_backend_list
-
+from telemetry.core.backends.chrome import oobe
class MiscWebContentsBackend(inspector_backend_list.InspectorBackendList):
"""A dynamic sequence of web contents not related to tabs and extensions.
@@ -13,8 +12,10 @@ class MiscWebContentsBackend(inspector_backend_list.InspectorBackendList):
"""
def __init__(self, browser_backend):
+ def OobeBackendWrapper(inspector_backend, backend_list):
+ return oobe.Oobe(inspector_backend, backend_list, browser_backend)
super(MiscWebContentsBackend, self).__init__(
- browser_backend, backend_wrapper=web_contents.WebContents)
+ browser_backend, backend_wrapper=OobeBackendWrapper)
@property
def oobe_exists(self):
diff --git a/tools/telemetry/telemetry/core/backends/chrome/oobe.py b/tools/telemetry/telemetry/core/backends/chrome/oobe.py
new file mode 100644
index 0000000000..ded264c7d8
--- /dev/null
+++ b/tools/telemetry/telemetry/core/backends/chrome/oobe.py
@@ -0,0 +1,113 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from telemetry.core import exceptions
+from telemetry.core import web_contents
+from telemetry.core import util
+
+class Oobe(web_contents.WebContents):
+ def __init__(self, inspector_backend, backend_list, browser_backend):
+ super(Oobe, self).__init__(inspector_backend, backend_list)
+ self._backend = browser_backend
+
+ def _SigninUIState(self):
+ """Returns the signin ui state of the oobe. HIDDEN: 0, GAIA_SIGNIN: 1,
+ ACCOUNT_PICKER: 2, WRONG_HWID_WARNING: 3, MANAGED_USER_CREATION_FLOW: 4.
+ These values are in
+ chrome/browser/resources/chromeos/login/display_manager.js
+ """
+ return self.EvaluateJavaScript('''
+ loginHeader = document.getElementById('login-header-bar')
+ if (loginHeader) {
+ loginHeader.signinUIState_;
+ }
+ ''')
+
+ def _WaitForSigninScreen(self):
+ """Waits for oobe to be on the signin or account picker screen."""
+ def OnAccountPickerScreen():
+ signin_state = self._SigninUIState()
+ # GAIA_SIGNIN or ACCOUNT_PICKER screens.
+ return signin_state == 1 or signin_state == 2
+ try:
+ util.WaitFor(OnAccountPickerScreen, 60)
+ except util.TimeoutException:
+ raise exceptions.LoginException('Timed out waiting for signin screen, '
+ 'signin state %d' % self._SigninUIState())
+
+ def _ClickBrowseAsGuest(self):
+ """Click the Browse As Guest button on the account picker screen. This will
+ restart the browser, and we could have a tab crash or a browser crash."""
+ try:
+ self.EvaluateJavaScript("""
+ var guest = document.getElementById("guest-user-button");
+ if (guest) {
+ guest.click();
+ }
+ """)
+ except (exceptions.TabCrashException,
+ exceptions.BrowserConnectionGoneException):
+ pass
+
+ def _GaiaLoginContext(self):
+ for gaia_context in range(15):
+ try:
+ if self.EvaluateJavaScriptInContext(
+ "document.getElementById('Email') != null", gaia_context):
+ return gaia_context
+ except exceptions.EvaluateException:
+ pass
+ return None
+
+ def NavigateGuestLogin(self):
+ """Navigates through oobe login screen as guest."""
+ logging.info('Logging in as guest')
+ util.WaitFor(lambda: self.EvaluateJavaScript(
+ 'typeof Oobe !== \'undefined\''), 10)
+
+ if self.EvaluateJavaScript(
+ "typeof Oobe.guestLoginForTesting != 'undefined'"):
+ self.ExecuteJavaScript('Oobe.guestLoginForTesting();')
+ else:
+ self._WaitForSigninScreen()
+ self._ClickBrowseAsGuest()
+
+ self._backend.WaitForLogin()
+
+ def NavigateFakeLogin(self, username, password):
+ """Logs in using Oobe.loginForTesting."""
+ logging.info('Invoking Oobe.loginForTesting')
+ util.WaitFor(lambda: self.EvaluateJavaScript(
+ 'typeof Oobe !== \'undefined\''), 10)
+
+ if self.EvaluateJavaScript(
+ 'typeof Oobe.loginForTesting == \'undefined\''):
+ raise exceptions.LoginException('Oobe.loginForTesting js api missing')
+
+ self.ExecuteJavaScript(
+ 'Oobe.loginForTesting(\'%s\', \'%s\');' % (username, password))
+ self._backend.WaitForLogin()
+
+ def NavigateGaiaLogin(self, username, password):
+ """Logs into the GAIA service with provided credentials."""
+ logging.info('Invoking Oobe.addUserForTesting')
+ util.WaitFor(lambda: self.EvaluateJavaScript(
+ 'typeof Oobe !== \'undefined\''), 10)
+ self.ExecuteJavaScript('Oobe.addUserForTesting();')
+
+ try:
+ gaia_context = util.WaitFor(self._GaiaLoginContext, timeout=30)
+ except util.TimeoutException:
+ self._backend.TakeScreenShot('add-user-screen')
+ raise
+
+ self.ExecuteJavaScriptInContext("""
+ document.getElementById('Email').value='%s';
+ document.getElementById('Passwd').value='%s';
+ document.getElementById('signIn').click();"""
+ % (username, password),
+ gaia_context)
+ self._backend.WaitForLogin()
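As the cros_browser_backend hunk earlier in this change shows, callers reach
these helpers through the backend's oobe property, e.g.:

    self.oobe.NavigateFakeLogin(self.browser_options.username,
                                self.browser_options.password)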
diff --git a/tools/telemetry/telemetry/core/backends/chrome/websocket_unittest.py b/tools/telemetry/telemetry/core/backends/chrome/websocket_unittest.py
index 66f79b3458..a49f4a5ae2 100644
--- a/tools/telemetry/telemetry/core/backends/chrome/websocket_unittest.py
+++ b/tools/telemetry/telemetry/core/backends/chrome/websocket_unittest.py
@@ -1,11 +1,36 @@
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
+import base64
+import BaseHTTPServer
+import hashlib
import socket
+import threading
import unittest
+
from telemetry.core.backends.chrome import websocket
+
+# Minimal handler for a local websocket server.
+class _FakeWebSocketHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+ def do_GET(self):
+ key = self.headers.getheader('Sec-WebSocket-Key')
+
+ value = key + '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
+ hashed = base64.encodestring(hashlib.sha1(value).digest()).strip().lower()
+
+ self.send_response(101)
+
+ self.send_header('Sec-Websocket-Accept', hashed)
+ self.send_header('upgrade', 'websocket')
+ self.send_header('connection', 'upgrade')
+ self.end_headers()
+
+ self.wfile.flush()
+
+
class TestWebSocket(unittest.TestCase):
def testExports(self):
self.assertNotEqual(websocket.create_connection, None)
@@ -13,11 +38,16 @@ class TestWebSocket(unittest.TestCase):
self.assertNotEqual(websocket.WebSocketTimeoutException, None)
def testSockOpts(self):
- ws = websocket.create_connection('ws://echo.websocket.org')
+ httpd = BaseHTTPServer.HTTPServer(('', 0), _FakeWebSocketHandler)
+ threading.Thread(target=httpd.handle_request).start()
+ ws_url = 'ws://127.0.0.1:%d' % httpd.server_port
+ ws = websocket.create_connection(ws_url)
self.assertNotEquals(
ws.sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR), 0)
+
+ threading.Thread(target=httpd.handle_request).start()
ws = websocket.create_connection(
- 'ws://echo.websocket.org',
+ ws_url,
sockopt=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)])
self.assertNotEquals(
ws.sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR), 0)
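
The fake handler added above answers the client handshake by deriving Sec-WebSocket-Accept from the client's Sec-WebSocket-Key. A minimal standalone sketch of that derivation (RFC 6455: append the fixed GUID, SHA-1 hash, base64-encode), assuming Python 2 str values as in the test:

import base64
import hashlib

_WEBSOCKET_GUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'

def MakeAcceptValue(sec_websocket_key):
  # Sec-WebSocket-Accept = base64(SHA-1(key + GUID)).
  # The handler above uses base64.encodestring, which appends a newline,
  # hence its .strip().
  digest = hashlib.sha1(sec_websocket_key + _WEBSOCKET_GUID).digest()
  return base64.b64encode(digest)

The handler also lowercases the value before sending it; the RFC-defined value is case-sensitive, so this presumably relies on the websocket client library used here being lenient about case.
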
diff --git a/tools/telemetry/telemetry/core/forwarders/android_forwarder.py b/tools/telemetry/telemetry/core/forwarders/android_forwarder.py
index ccfc286a5d..4e6e6fd346 100644
--- a/tools/telemetry/telemetry/core/forwarders/android_forwarder.py
+++ b/tools/telemetry/telemetry/core/forwarders/android_forwarder.py
@@ -52,14 +52,14 @@ class AndroidForwarder(forwarders.Forwarder):
def __init__(self, adb, port_pairs):
super(AndroidForwarder, self).__init__(port_pairs)
- self._adb = adb.Adb()
- forwarder.Forwarder.Map([p for p in port_pairs if p], self._adb)
+ self._device = adb.device()
+ forwarder.Forwarder.Map([p for p in port_pairs if p], self._device)
# TODO(tonyg): Verify that each port can connect to host.
def Close(self):
for port_pair in self._port_pairs:
if port_pair:
- forwarder.Forwarder.UnmapDevicePort(port_pair.local_port, self._adb)
+ forwarder.Forwarder.UnmapDevicePort(port_pair.local_port, self._device)
super(AndroidForwarder, self).Close()
@@ -157,9 +157,10 @@ class AndroidRndisConfigurator(object):
_TELEMETRY_INTERFACE_FILE = '/etc/network/interfaces.d/telemetry-{}.conf'
def __init__(self, adb):
- is_root_enabled = adb.Adb().EnableAdbRoot()
+ self._device = adb.device()
+
+ is_root_enabled = self._device.old_interface.EnableAdbRoot()
assert is_root_enabled, 'RNDIS forwarding requires a rooted device.'
- self._adb = adb.Adb()
self._device_ip = None
self._host_iface = None
@@ -175,15 +176,15 @@ class AndroidRndisConfigurator(object):
def _IsRndisSupported(self):
"""Checks that the device has RNDIS support in the kernel."""
- return self._adb.FileExistsOnDevice(
+ return self._device.old_interface.FileExistsOnDevice(
'%s/f_rndis/device' % self._RNDIS_DEVICE)
def _WaitForDevice(self):
- self._adb.Adb().SendCommand('wait-for-device')
+ self._device.old_interface.Adb().SendCommand('wait-for-device')
def _FindDeviceRndisInterface(self):
"""Returns the name of the RNDIS network interface if present."""
- config = self._adb.RunShellCommand('netcfg')
+ config = self._device.old_interface.RunShellCommand('netcfg')
interfaces = [line.split()[0] for line in config]
candidates = [iface for iface in interfaces if re.match('rndis|usb', iface)]
if candidates:
@@ -200,7 +201,7 @@ class AndroidRndisConfigurator(object):
def _FindHostRndisInterface(self):
"""Returns the name of the host-side network interface."""
interface_list = self._EnumerateHostInterfaces()
- ether_address = self._adb.GetFileContents(
+ ether_address = self._device.old_interface.GetFileContents(
'%s/f_rndis/ethaddr' % self._RNDIS_DEVICE)[0]
interface_name = None
for line in interface_list:
@@ -214,7 +215,7 @@ class AndroidRndisConfigurator(object):
['sudo', 'bash', '-c', 'echo -e "%s" > %s' % (contents, path)])
def _DisableRndis(self):
- self._adb.system_properties['sys.usb.config'] = 'adb'
+ self._device.old_interface.system_properties['sys.usb.config'] = 'adb'
self._WaitForDevice()
def _EnableRndis(self):
@@ -253,12 +254,13 @@ function doit() {
doit &
""" % {'dev': self._RNDIS_DEVICE, 'functions': 'rndis,adb',
'prefix': script_prefix }
- self._adb.SetFileContents('%s.sh' % script_prefix, script)
+ self._device.old_interface.SetFileContents('%s.sh' % script_prefix, script)
# TODO(szym): run via su -c if necessary.
- self._adb.RunShellCommand('rm %s.log' % script_prefix)
- self._adb.RunShellCommand('. %s.sh' % script_prefix)
+ self._device.old_interface.RunShellCommand('rm %s.log' % script_prefix)
+ self._device.old_interface.RunShellCommand('. %s.sh' % script_prefix)
self._WaitForDevice()
- result = self._adb.GetFileContents('%s.log' % script_prefix)
+ result = self._device.old_interface.GetFileContents(
+ '%s.log' % script_prefix)
assert any('DONE' in line for line in result), 'RNDIS script did not run!'
def _CheckEnableRndis(self, force):
@@ -305,15 +307,16 @@ doit &
"""Returns the IP addresses on all connected devices.
Excludes interface |excluded_iface| on the selected device.
"""
- my_device = self._adb.GetDevice()
+ my_device = self._device.old_interface.GetDevice()
addresses = []
- for device in adb_commands.GetAttachedDevices():
- adb = adb_commands.AdbCommands(device).Adb()
- if device == my_device:
+ for device_serial in adb_commands.GetAttachedDevices():
+ device = adb_commands.AdbCommands(device_serial).device()
+ if device_serial == my_device:
excluded = excluded_iface
else:
excluded = 'no interfaces excluded on other devices'
- addresses += [line.split()[2] for line in adb.RunShellCommand('netcfg')
+ addresses += [line.split()[2]
+ for line in device.old_interface.RunShellCommand('netcfg')
if excluded not in line]
return addresses
@@ -343,7 +346,8 @@ doit &
if candidate not in used_addresses:
return candidate
- orig_interfaces = open(self._NETWORK_INTERFACES, 'r').read()
+ with open(self._NETWORK_INTERFACES) as f:
+ orig_interfaces = f.read()
if self._INTERFACES_INCLUDE not in orig_interfaces:
interfaces = '\n'.join([
orig_interfaces,
@@ -409,7 +413,7 @@ doit &
netmask = _Long2Ip(netmask)
# TODO(szym) run via su -c if necessary.
- self._adb.RunShellCommand('ifconfig %s %s netmask %s up' %
+ self._device.old_interface.RunShellCommand('ifconfig %s %s netmask %s up' %
(device_iface, device_ip, netmask))
# Enabling the interface sometimes breaks adb.
self._WaitForDevice()
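
_FindDeviceRndisInterface above shells out to netcfg and picks the first interface whose name matches rndis/usb. A standalone sketch of that parsing; the sample lines are illustrative only, since real netcfg output varies across Android releases:

import re

def FindRndisInterface(netcfg_lines):
  """Returns the first rndis/usb interface name from 'netcfg' output lines."""
  interfaces = [line.split()[0] for line in netcfg_lines if line.strip()]
  candidates = [iface for iface in interfaces if re.match('rndis|usb', iface)]
  return candidates[0] if candidates else None

sample = [
    'lo      UP   127.0.0.1/8        0x00000049 00:00:00:00:00:00',
    'rndis0  UP   192.168.42.129/24  0x00001043 aa:bb:cc:dd:ee:ff',
]
assert FindRndisInterface(sample) == 'rndis0'
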
diff --git a/tools/telemetry/telemetry/core/memory_cache_http_server.py b/tools/telemetry/telemetry/core/memory_cache_http_server.py
index e89316170f..9423a25dad 100644
--- a/tools/telemetry/telemetry/core/memory_cache_http_server.py
+++ b/tools/telemetry/telemetry/core/memory_cache_http_server.py
@@ -23,6 +23,9 @@ ResourceAndRange = namedtuple('ResourceAndRange', ['resource', 'byte_range'])
class MemoryCacheHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+ protocol_version = 'HTTP/1.1' # override BaseHTTPServer setting
+ wbufsize = -1 # override StreamRequestHandler (a base class) setting
+
def do_GET(self):
"""Serve a GET request."""
resource_range = self.SendHead()
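
The two class attributes added above change the standard-library handler's behavior: protocol_version = 'HTTP/1.1' enables persistent (keep-alive) connections instead of the HTTP/1.0 default, and wbufsize = -1 switches the response stream from unbuffered writes to the platform-default buffer size. A minimal sketch of the same overrides on a bare SimpleHTTPRequestHandler, using the Python 2 module names this file targets:

import BaseHTTPServer
import SimpleHTTPServer

class KeepAliveHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
  protocol_version = 'HTTP/1.1'  # persistent connections instead of HTTP/1.0
  wbufsize = -1                  # buffered wfile instead of unbuffered writes

if __name__ == '__main__':
  # Bind to an ephemeral port and serve a single request.
  httpd = BaseHTTPServer.HTTPServer(('127.0.0.1', 0), KeepAliveHandler)
  httpd.handle_request()
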
diff --git a/tools/telemetry/telemetry/core/platform/android_platform_backend.py b/tools/telemetry/telemetry/core/platform/android_platform_backend.py
index 9591ca3cfc..d0ec854e32 100644
--- a/tools/telemetry/telemetry/core/platform/android_platform_backend.py
+++ b/tools/telemetry/telemetry/core/platform/android_platform_backend.py
@@ -41,24 +41,24 @@ _HOST_APPLICATIONS = [
class AndroidPlatformBackend(
proc_supporting_platform_backend.ProcSupportingPlatformBackend):
- def __init__(self, adb, no_performance_mode):
+ def __init__(self, device, no_performance_mode):
super(AndroidPlatformBackend, self).__init__()
- self._adb = adb
+ self._device = device
self._surface_stats_collector = None
- self._perf_tests_setup = perf_control.PerfControl(self._adb)
- self._thermal_throttle = thermal_throttle.ThermalThrottle(self._adb)
+ self._perf_tests_setup = perf_control.PerfControl(self._device)
+ self._thermal_throttle = thermal_throttle.ThermalThrottle(self._device)
self._no_performance_mode = no_performance_mode
self._raw_display_frame_rate_measurements = []
self._host_platform_backend = factory.GetPlatformBackendForCurrentOS()
self._can_access_protected_file_contents = \
- self._adb.CanAccessProtectedFileContents()
+ self._device.old_interface.CanAccessProtectedFileContents()
power_controller = power_monitor_controller.PowerMonitorController([
monsoon_power_monitor.MonsoonPowerMonitor(),
- android_ds2784_power_monitor.DS2784PowerMonitor(adb),
- android_dumpsys_power_monitor.DumpsysPowerMonitor(adb),
+ android_ds2784_power_monitor.DS2784PowerMonitor(device),
+ android_dumpsys_power_monitor.DumpsysPowerMonitor(device),
])
self._powermonitor = android_temperature_monitor.AndroidTemperatureMonitor(
- power_controller, adb)
+ power_controller, device)
self._video_recorder = None
self._video_output = None
if self._no_performance_mode:
@@ -72,7 +72,7 @@ class AndroidPlatformBackend(
# Clear any leftover data from previous timed out tests
self._raw_display_frame_rate_measurements = []
self._surface_stats_collector = \
- surface_stats_collector.SurfaceStatsCollector(self._adb)
+ surface_stats_collector.SurfaceStatsCollector(self._device)
self._surface_stats_collector.Start()
def StopRawDisplayFrameRateMeasurement(self):
@@ -110,14 +110,16 @@ class AndroidPlatformBackend(
return self._thermal_throttle.HasBeenThrottled()
def GetSystemCommitCharge(self):
- for line in self._adb.RunShellCommand('dumpsys meminfo', log_result=False):
+ for line in self._device.old_interface.RunShellCommand(
+ 'dumpsys meminfo', log_result=False):
if line.startswith('Total PSS: '):
return int(line.split()[2]) * 1024
return 0
@decorators.Cache
def GetSystemTotalPhysicalMemory(self):
- for line in self._adb.RunShellCommand('dumpsys meminfo', log_result=False):
+ for line in self._device.old_interface.RunShellCommand(
+ 'dumpsys meminfo', log_result=False):
if line.startswith('Total RAM: '):
return int(line.split()[2]) * 1024
return 0
@@ -140,16 +142,16 @@ class AndroidPlatformBackend(
This can be used to make memory measurements more stable in particular.
"""
if not android_prebuilt_profiler_helper.InstallOnDevice(
- self._adb, 'purge_ashmem'):
+ self._device, 'purge_ashmem'):
raise Exception('Error installing purge_ashmem.')
- if self._adb.RunShellCommand(
+ if self._device.old_interface.RunShellCommand(
android_prebuilt_profiler_helper.GetDevicePath('purge_ashmem'),
log_result=True):
return
raise Exception('Error while purging ashmem.')
def GetMemoryStats(self, pid):
- memory_usage = self._adb.GetMemoryUsageForPid(pid)[0]
+ memory_usage = self._device.old_interface.GetMemoryUsageForPid(pid)[0]
return {'ProportionalSetSize': memory_usage['Pss'] * 1024,
'SharedDirty': memory_usage['Shared_Dirty'] * 1024,
'PrivateDirty': memory_usage['Private_Dirty'] * 1024,
@@ -182,20 +184,21 @@ class AndroidPlatformBackend(
@decorators.Cache
def GetOSVersionName(self):
- return self._adb.GetBuildId()[0]
+ return self._device.old_interface.GetBuildId()[0]
def CanFlushIndividualFilesFromSystemCache(self):
return False
def FlushEntireSystemCache(self):
- cache = cache_control.CacheControl(self._adb)
+ cache = cache_control.CacheControl(self._device)
cache.DropRamCaches()
def FlushSystemCacheForDirectory(self, directory, ignoring=None):
raise NotImplementedError()
def FlushDnsCache(self):
- self._adb.RunShellCommandWithSU('ndc resolver flushdefaultif')
+ self._device.old_interface.RunShellCommandWithSU(
+ 'ndc resolver flushdefaultif')
def LaunchApplication(
self, application, parameters=None, elevate_privilege=False):
@@ -207,12 +210,13 @@ class AndroidPlatformBackend(
raise NotImplementedError("elevate_privilege isn't supported on android.")
if not parameters:
parameters = ''
- self._adb.RunShellCommand('am start ' + parameters + ' ' + application)
+ self._device.old_interface.RunShellCommand(
+ 'am start ' + parameters + ' ' + application)
def IsApplicationRunning(self, application):
if application in _HOST_APPLICATIONS:
return self._host_platform_backend.IsApplicationRunning(application)
- return len(self._adb.ExtractPid(application)) > 0
+ return len(self._device.old_interface.ExtractPid(application)) > 0
def CanLaunchApplication(self, application):
if application in _HOST_APPLICATIONS:
@@ -239,7 +243,7 @@ class AndroidPlatformBackend(
if self.is_video_capture_running:
self._video_recorder.Stop()
self._video_recorder = screenshot.VideoRecorder(
- self._adb, self._video_output, megabits_per_second=min_bitrate_mbps)
+ self._device, self._video_output, megabits_per_second=min_bitrate_mbps)
self._video_recorder.Start()
util.WaitFor(self._video_recorder.IsStarted, 5)
@@ -323,7 +327,7 @@ class AndroidPlatformBackend(
logging.warning('%s cannot be retrieved on non-rooted device.' % fname)
return ''
return '\n'.join(
- self._adb.GetProtectedFileContents(fname))
+ self._device.old_interface.GetProtectedFileContents(fname))
def _GetPsOutput(self, columns, pid=None):
assert columns == ['pid', 'name'] or columns == ['pid'], \
@@ -331,7 +335,8 @@ class AndroidPlatformBackend(
command = 'ps'
if pid:
command += ' -p %d' % pid
- ps = self._adb.RunShellCommand(command, log_result=False)[1:]
+ ps = self._device.old_interface.RunShellCommand(
+ command, log_result=False)[1:]
output = []
for line in ps:
data = line.split()
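
GetSystemCommitCharge and GetSystemTotalPhysicalMemory above both scan 'dumpsys meminfo' output for a single summary line and convert the reported kilobytes to bytes. The same parsing as a standalone sketch; the sample lines are illustrative, real dumpsys output is much longer:

def ParseMeminfoTotal(dumpsys_lines, prefix):
  """Returns the kB figure following |prefix| in 'dumpsys meminfo' output, in bytes."""
  for line in dumpsys_lines:
    if line.startswith(prefix):
      return int(line.split()[2]) * 1024  # dumpsys reports kilobytes
  return 0

lines = ['Total RAM: 1899548 kB', 'Total PSS: 1034523 kB']
assert ParseMeminfoTotal(lines, 'Total RAM: ') == 1899548 * 1024
assert ParseMeminfoTotal(lines, 'Total PSS: ') == 1034523 * 1024
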
diff --git a/tools/telemetry/telemetry/core/platform/android_platform_backend_unittest.py b/tools/telemetry/telemetry/core/platform/android_platform_backend_unittest.py
index be8d156488..8217d00d23 100644
--- a/tools/telemetry/telemetry/core/platform/android_platform_backend_unittest.py
+++ b/tools/telemetry/telemetry/core/platform/android_platform_backend_unittest.py
@@ -34,6 +34,11 @@ class MockAdbCommands(object):
return []
+class MockDevice(object):
+ def __init__(self, mock_adb_commands):
+ self.old_interface = mock_adb_commands
+
+
class AndroidPlatformBackendTest(unittest.TestCase):
def setUp(self):
self._stubs = system_stub.Override(android_platform_backend,
@@ -50,7 +55,7 @@ class AndroidPlatformBackendTest(unittest.TestCase):
'4294967295 1074458624 1074463824 3197495984 3197494152 '
'1074767676 0 4612 0 38136 4294967295 0 0 17 0 0 0 0 0 0 '
'1074470376 1074470912 1102155776']
- adb_valid_proc_content = MockAdbCommands(proc_stat_content, {})
+ adb_valid_proc_content = MockDevice(MockAdbCommands(proc_stat_content, {}))
backend = android_platform_backend.AndroidPlatformBackend(
adb_valid_proc_content, False)
cpu_stats = backend.GetCpuStats('7702')
@@ -59,7 +64,7 @@ class AndroidPlatformBackendTest(unittest.TestCase):
@test.Disabled('chromeos')
def testGetCpuStatsInvalidPID(self):
# Mock an empty /proc/pid/stat.
- adb_empty_proc_stat = MockAdbCommands([], {})
+ adb_empty_proc_stat = MockDevice(MockAdbCommands([], {}))
backend = android_platform_backend.AndroidPlatformBackend(
adb_empty_proc_stat, False)
cpu_stats = backend.GetCpuStats('7702')
@@ -67,7 +72,7 @@ class AndroidPlatformBackendTest(unittest.TestCase):
@test.Disabled
def testFramesFromMp4(self):
- mock_adb = MockAdbCommands([])
+ mock_adb = MockDevice(MockAdbCommands([]))
backend = android_platform_backend.AndroidPlatformBackend(mock_adb, False)
try:
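
The unit-test fix above is the standard shim for this migration: the backend now expects a device object and reaches the legacy adb API through its old_interface attribute, so the existing adb mock simply gets wrapped. A stripped-down sketch of the pattern:

class MockAdbCommands(object):
  """Stands in for the legacy adb-commands interface used by the backend."""
  def RunShellCommand(self, command, log_result=False):
    return []

class MockDevice(object):
  """Exposes a legacy adb mock through the new device interface."""
  def __init__(self, mock_adb_commands):
    self.old_interface = mock_adb_commands

device = MockDevice(MockAdbCommands())
# Callers migrated from adb.RunShellCommand(...) now go through:
device.old_interface.RunShellCommand('dumpsys meminfo', log_result=False)
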
diff --git a/tools/telemetry/telemetry/core/platform/linux_platform_backend.py b/tools/telemetry/telemetry/core/platform/linux_platform_backend.py
index 044a9946c5..679e56c45a 100644
--- a/tools/telemetry/telemetry/core/platform/linux_platform_backend.py
+++ b/tools/telemetry/telemetry/core/platform/linux_platform_backend.py
@@ -45,14 +45,15 @@ class LinuxPlatformBackend(
codename = None
version = None
- for line in open('/etc/lsb-release', 'r').readlines():
- key, _, value = line.partition('=')
- if key == 'DISTRIB_CODENAME':
- codename = value.strip()
- elif key == 'DISTRIB_RELEASE':
- version = float(value)
- if codename and version:
- break
+ with open('/etc/lsb-release') as f:
+ for line in f.readlines():
+ key, _, value = line.partition('=')
+ if key == 'DISTRIB_CODENAME':
+ codename = value.strip()
+ elif key == 'DISTRIB_RELEASE':
+ version = float(value)
+ if codename and version:
+ break
return platform_backend.OSVersion(codename, version)
def CanFlushIndividualFilesFromSystemCache(self):
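
The rewritten loop above keeps the same key handling but closes /etc/lsb-release deterministically by reading it inside a context manager. The same logic as a standalone function:

def ParseLsbRelease(path='/etc/lsb-release'):
  """Returns (codename, version) parsed from an lsb-release style file."""
  codename = None
  version = None
  with open(path) as f:
    for line in f:
      key, _, value = line.partition('=')
      if key == 'DISTRIB_CODENAME':
        codename = value.strip()
      elif key == 'DISTRIB_RELEASE':
        version = float(value)
      if codename and version:
        break
  return codename, version
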
diff --git a/tools/telemetry/telemetry/core/platform/power_monitor/android_ds2784_power_monitor.py b/tools/telemetry/telemetry/core/platform/power_monitor/android_ds2784_power_monitor.py
index 5ee5eb1e68..4d5d350e66 100644
--- a/tools/telemetry/telemetry/core/platform/power_monitor/android_ds2784_power_monitor.py
+++ b/tools/telemetry/telemetry/core/platform/power_monitor/android_ds2784_power_monitor.py
@@ -19,23 +19,23 @@ VOLTAGE = os.path.join(FUEL_GAUGE_PATH, 'voltage_now')
class DS2784PowerMonitor(power_monitor.PowerMonitor):
- def __init__(self, adb):
+ def __init__(self, device):
super(DS2784PowerMonitor, self).__init__()
- self._adb = adb
+ self._device = device
self._powermonitor_process_port = None
- android_prebuilt_profiler_helper.InstallOnDevice(adb, 'file_poller')
+ android_prebuilt_profiler_helper.InstallOnDevice(device, 'file_poller')
self._file_poller_binary = android_prebuilt_profiler_helper.GetDevicePath(
'file_poller')
@decorators.Cache
def _HasFuelGauge(self):
- return self._adb.FileExistsOnDevice(CHARGE_COUNTER)
+ return self._device.old_interface.FileExistsOnDevice(CHARGE_COUNTER)
def CanMonitorPower(self):
if not self._HasFuelGauge():
return False
- if self._adb.IsDeviceCharging():
+ if self._device.old_interface.IsDeviceCharging():
logging.warning('Can\'t monitor power usage since device is charging.')
return False
return True
@@ -43,15 +43,16 @@ class DS2784PowerMonitor(power_monitor.PowerMonitor):
def StartMonitoringPower(self, browser):
assert not self._powermonitor_process_port, (
'Must call StopMonitoringPower().')
- self._powermonitor_process_port = int(self._adb.RunShellCommand(
- '%s %d %s %s %s' % (self._file_poller_binary, SAMPLE_RATE_HZ,
- CHARGE_COUNTER, CURRENT, VOLTAGE))[0])
+ self._powermonitor_process_port = int(
+ self._device.old_interface.RunShellCommand(
+ '%s %d %s %s %s' % (self._file_poller_binary, SAMPLE_RATE_HZ,
+ CHARGE_COUNTER, CURRENT, VOLTAGE))[0])
def StopMonitoringPower(self):
assert self._powermonitor_process_port, (
'StartMonitoringPower() not called.')
try:
- result = '\n'.join(self._adb.RunShellCommand(
+ result = '\n'.join(self._device.old_interface.RunShellCommand(
'%s %d' % (self._file_poller_binary,
self._powermonitor_process_port)))
assert result, 'PowerMonitor produced no output'
diff --git a/tools/telemetry/telemetry/core/platform/power_monitor/android_dumpsys_power_monitor.py b/tools/telemetry/telemetry/core/platform/power_monitor/android_dumpsys_power_monitor.py
index 0e7cef57b2..15e80f0356 100644
--- a/tools/telemetry/telemetry/core/platform/power_monitor/android_dumpsys_power_monitor.py
+++ b/tools/telemetry/telemetry/core/platform/power_monitor/android_dumpsys_power_monitor.py
@@ -15,18 +15,18 @@ class DumpsysPowerMonitor(power_monitor.PowerMonitor):
consumption of a single android application. This measure uses a heuristic
and is the same information end-users see with the battery application.
"""
- def __init__(self, adb):
+ def __init__(self, device):
"""Constructor.
Args:
- adb: adb proxy.
+ device: DeviceUtils instance.
"""
super(DumpsysPowerMonitor, self).__init__()
- self._adb = adb
+ self._device = device
self._browser = None
def CanMonitorPower(self):
- return self._adb.CanControlUsbCharging()
+ return self._device.old_interface.CanControlUsbCharging()
def StartMonitoringPower(self, browser):
assert not self._browser, (
@@ -35,18 +35,19 @@ class DumpsysPowerMonitor(power_monitor.PowerMonitor):
# Disable the charging of the device over USB. This is necessary because the
# device only collects information about power usage when the device is not
# charging.
- self._adb.DisableUsbCharging()
+ self._device.old_interface.DisableUsbCharging()
def StopMonitoringPower(self):
assert self._browser, (
'StartMonitoringPower() not called.')
try:
- self._adb.EnableUsbCharging()
+ self._device.old_interface.EnableUsbCharging()
# pylint: disable=W0212
package = self._browser._browser_backend.package
# By default, 'dumpsys batterystats' measures power consumption during the
# last unplugged period.
- result = self._adb.RunShellCommand('dumpsys batterystats -c %s' % package)
+ result = self._device.old_interface.RunShellCommand(
+ 'dumpsys batterystats -c %s' % package)
assert result, 'Dumpsys produced no output'
return DumpsysPowerMonitor.ParseSamplingOutput(package, result)
finally:
diff --git a/tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor.py b/tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor.py
index 90d32a5ca2..5865510779 100644
--- a/tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor.py
+++ b/tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor.py
@@ -13,9 +13,9 @@ class AndroidTemperatureMonitor(power_monitor.PowerMonitor):
Delegates monitoring to another PowerMonitor and adds temperature measurements
to overall results.
"""
- def __init__(self, monitor, adb):
+ def __init__(self, monitor, device):
super(AndroidTemperatureMonitor, self).__init__()
- self._adb = adb
+ self._device = device
self._power_monitor = monitor
self._can_monitor_with_power_monitor = None
@@ -58,7 +58,7 @@ class AndroidTemperatureMonitor(power_monitor.PowerMonitor):
return power_data
def _GetBoardTemperatureCelsius(self):
- contents = self._adb.GetFileContents(_TEMPERATURE_FILE)
+ contents = self._device.old_interface.GetFileContents(_TEMPERATURE_FILE)
if len(contents) > 0:
return float(contents[0])
return None
diff --git a/tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor_unittest.py b/tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor_unittest.py
index eac74b36da..bd79a470d6 100644
--- a/tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor_unittest.py
+++ b/tools/telemetry/telemetry/core/platform/power_monitor/android_temperature_monitor_unittest.py
@@ -62,9 +62,11 @@ class AndroidTemperatureMonitorTest(unittest.TestCase):
mock_power_monitor.ExpectCall('CanMonitorPower').WillReturn(False)
mock_adb = simple_mock.MockObject()
mock_adb.ExpectCall('GetFileContents', _).WillReturn([])
+ mock_device_utils = simple_mock.MockObject()
+ setattr(mock_device_utils, 'old_interface', mock_adb)
monitor = android_temperature_monitor.AndroidTemperatureMonitor(
- mock_power_monitor, mock_adb)
+ mock_power_monitor, mock_device_utils)
self.assertTrue(monitor.CanMonitorPower())
monitor.StartMonitoringPower(None)
measurements = monitor.StopMonitoringPower()
diff --git a/tools/telemetry/telemetry/core/platform/power_monitor/powermetrics_power_monitor_unittest.py b/tools/telemetry/telemetry/core/platform/power_monitor/powermetrics_power_monitor_unittest.py
index 6f2d1607db..0e22959c6e 100644
--- a/tools/telemetry/telemetry/core/platform/power_monitor/powermetrics_power_monitor_unittest.py
+++ b/tools/telemetry/telemetry/core/platform/power_monitor/powermetrics_power_monitor_unittest.py
@@ -32,9 +32,9 @@ class PowerMetricsPowerMonitorTest(unittest.TestCase):
@test.Enabled('mac')
def testParsePowerMetricsOutput(self):
def getOutput(output_file):
- test_data_path = os.path.join(util.GetUnittestDataDir(),
- output_file)
- process_output = open(test_data_path, 'r').read()
+ test_data_path = os.path.join(util.GetUnittestDataDir(), output_file)
+ with open(test_data_path, 'r') as f:
+ process_output = f.read()
return (powermetrics_power_monitor.PowerMetricsPowerMonitor.
ParsePowerMetricsOutput(process_output))
diff --git a/tools/telemetry/telemetry/core/platform/proc_supporting_platform_backend_unittest.py b/tools/telemetry/telemetry/core/platform/proc_supporting_platform_backend_unittest.py
index 81069b841c..59da734566 100644
--- a/tools/telemetry/telemetry/core/platform/proc_supporting_platform_backend_unittest.py
+++ b/tools/telemetry/telemetry/core/platform/proc_supporting_platform_backend_unittest.py
@@ -33,15 +33,12 @@ class ProcSupportingPlatformBackendTest(unittest.TestCase):
return
backend = TestBackend()
- backend.SetMockFile(
- '/proc/1/stat',
- open(os.path.join(util.GetUnittestDataDir(), 'stat')).read())
- backend.SetMockFile(
- '/proc/1/status',
- open(os.path.join(util.GetUnittestDataDir(), 'status')).read())
- backend.SetMockFile(
- '/proc/1/smaps',
- open(os.path.join(util.GetUnittestDataDir(), 'smaps')).read())
+ with open(os.path.join(util.GetUnittestDataDir(), 'stat')) as f:
+ backend.SetMockFile('/proc/1/stat', f.read())
+ with open(os.path.join(util.GetUnittestDataDir(), 'status')) as f:
+ backend.SetMockFile('/proc/1/status', f.read())
+ with open(os.path.join(util.GetUnittestDataDir(), 'smaps')) as f:
+ backend.SetMockFile('/proc/1/smaps', f.read())
result = backend.GetMemoryStats(1)
self.assertEquals(result, {'PrivateDirty': 5324800,
'VM': 1025978368,
@@ -55,15 +52,12 @@ class ProcSupportingPlatformBackendTest(unittest.TestCase):
return
backend = TestBackend()
- backend.SetMockFile(
- '/proc/1/stat',
- open(os.path.join(util.GetUnittestDataDir(), 'stat')).read())
- backend.SetMockFile(
- '/proc/1/status',
- open(os.path.join(util.GetUnittestDataDir(), 'status_nohwm')).read())
- backend.SetMockFile(
- '/proc/1/smaps',
- open(os.path.join(util.GetUnittestDataDir(), 'smaps')).read())
+ with open(os.path.join(util.GetUnittestDataDir(), 'stat')) as f:
+ backend.SetMockFile('/proc/1/stat', f.read())
+ with open(os.path.join(util.GetUnittestDataDir(), 'status_nohwm')) as f:
+ backend.SetMockFile('/proc/1/status', f.read())
+ with open(os.path.join(util.GetUnittestDataDir(), 'smaps')) as f:
+ backend.SetMockFile('/proc/1/smaps', f.read())
result = backend.GetMemoryStats(1)
self.assertEquals(result, {'PrivateDirty': 5324800,
'VM': 1025978368,
diff --git a/tools/telemetry/telemetry/core/platform/profiler/android_memreport_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/android_memreport_profiler.py
deleted file mode 100644
index 1dd206d203..0000000000
--- a/tools/telemetry/telemetry/core/platform/profiler/android_memreport_profiler.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import subprocess
-
-from telemetry.core import util
-from telemetry.core.backends.chrome import android_browser_finder
-from telemetry.core.platform import profiler
-
-class AndroidMemReportProfiler(profiler.Profiler):
- """Android-specific, collects 'memreport' graphs."""
-
- def __init__(self, browser_backend, platform_backend, output_path, state):
- super(AndroidMemReportProfiler, self).__init__(
- browser_backend, platform_backend, output_path, state)
- self._html_file = output_path + '.html'
- self._memreport = subprocess.Popen(
- [os.path.join(util.GetChromiumSrcDir(),
- 'tools', 'android', 'memdump', 'memreport.py'),
- '--manual-graph', '--package', browser_backend.package],
- stdout=file(self._html_file, 'w'),
- stdin=subprocess.PIPE)
-
- @classmethod
- def name(cls):
- return 'android-memreport'
-
- @classmethod
- def is_supported(cls, browser_type):
- if browser_type == 'any':
- return android_browser_finder.CanFindAvailableBrowsers()
- return browser_type.startswith('android')
-
- def CollectProfile(self):
- self._memreport.communicate(input='\n')
- print 'To view the memory report, open:'
- print self._html_file
- return [self._html_file]
diff --git a/tools/telemetry/telemetry/core/platform/profiler/android_prebuilt_profiler_helper.py b/tools/telemetry/telemetry/core/platform/profiler/android_prebuilt_profiler_helper.py
index 17abf08ee7..fa838eafe8 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/android_prebuilt_profiler_helper.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/android_prebuilt_profiler_helper.py
@@ -19,7 +19,7 @@ def GetDevicePath(profiler_binary):
@decorators.Cache
-def InstallOnDevice(adb, profiler_binary):
+def InstallOnDevice(device, profiler_binary):
host_path = support_binaries.FindPath(profiler_binary, 'android')
if not host_path:
logging.error('Profiler binary "%s" not found. Could not be installed',
@@ -27,6 +27,7 @@ def InstallOnDevice(adb, profiler_binary):
return False
device_binary_path = GetDevicePath(profiler_binary)
- adb.PushIfNeeded(host_path, device_binary_path)
- adb.RunShellCommand('chmod 777 ' + device_binary_path)
+ device.old_interface.PushIfNeeded(host_path, device_binary_path)
+ device.old_interface.RunShellCommand('chmod 777 ' + device_binary_path)
return True
+
diff --git a/tools/telemetry/telemetry/core/platform/profiler/android_screen_recorder_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/android_screen_recorder_profiler.py
index 030ccb201f..423b250fde 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/android_screen_recorder_profiler.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/android_screen_recorder_profiler.py
@@ -21,7 +21,7 @@ class AndroidScreenRecordingProfiler(profiler.Profiler):
'screenshot.py'),
'--video',
'--file', self._output_path,
- '--device', browser_backend.adb.device()],
+ '--device', browser_backend.adb.device_serial()],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@classmethod
diff --git a/tools/telemetry/telemetry/core/platform/profiler/android_traceview_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/android_traceview_profiler.py
index a1d3510ab3..9513f0b954 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/android_traceview_profiler.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/android_traceview_profiler.py
@@ -18,7 +18,7 @@ class AndroidTraceviewProfiler(profiler.Profiler):
super(AndroidTraceviewProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
- if self._browser_backend.adb.Adb().FileExistsOnDevice(
+ if self._browser_backend.adb.device().old_interface.FileExistsOnDevice(
self._DEFAULT_DEVICE_DIR):
self._browser_backend.adb.RunShellCommand(
'rm ' + os.path.join(self._DEFAULT_DEVICE_DIR, '*'))
@@ -52,7 +52,7 @@ class AndroidTraceviewProfiler(profiler.Profiler):
self._browser_backend.adb.RunShellCommand('am profile %s stop' % pid)
util.WaitFor(lambda: self._FileSize(trace_file) > 0, timeout=10)
output_files.append(trace_file)
- self._browser_backend.adb.Adb().Adb().Pull(
+ self._browser_backend.adb.device().old_interface.Adb().Pull(
self._DEFAULT_DEVICE_DIR, self._output_path)
self._browser_backend.adb.RunShellCommand(
'rm ' + os.path.join(self._DEFAULT_DEVICE_DIR, '*'))
@@ -62,5 +62,6 @@ class AndroidTraceviewProfiler(profiler.Profiler):
return output_files
def _FileSize(self, file_name):
- f = self._browser_backend.adb.Adb().ListPathContents(file_name)
+ f = self._browser_backend.adb.device().old_interface.ListPathContents(
+ file_name)
return f.get(os.path.basename(file_name), (0, ))[0]
diff --git a/tools/telemetry/telemetry/core/platform/profiler/java_heap_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/java_heap_profiler.py
index 16a9cca706..a02e84c2c5 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/java_heap_profiler.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/java_heap_profiler.py
@@ -46,7 +46,7 @@ class JavaHeapProfiler(profiler.Profiler):
def CollectProfile(self):
self._timer.cancel()
self._DumpJavaHeap(True)
- self._browser_backend.adb.Adb().Adb().Pull(
+ self._browser_backend.adb.device().old_interface.Adb().Pull(
self._DEFAULT_DEVICE_DIR, self._output_path)
self._browser_backend.adb.RunShellCommand(
'rm ' + os.path.join(self._DEFAULT_DEVICE_DIR, '*'))
@@ -65,7 +65,7 @@ class JavaHeapProfiler(profiler.Profiler):
self._DumpJavaHeap(False)
def _DumpJavaHeap(self, wait_for_completion):
- if not self._browser_backend.adb.Adb().FileExistsOnDevice(
+ if not self._browser_backend.adb.device().old_interface.FileExistsOnDevice(
self._DEFAULT_DEVICE_DIR):
self._browser_backend.adb.RunShellCommand(
'mkdir -p ' + self._DEFAULT_DEVICE_DIR)
@@ -83,5 +83,6 @@ class JavaHeapProfiler(profiler.Profiler):
self._run_count += 1
def _FileSize(self, file_name):
- f = self._browser_backend.adb.Adb().ListPathContents(file_name)
+ f = self._browser_backend.adb.device().old_interface.ListPathContents(
+ file_name)
return f.get(os.path.basename(file_name), (0, ))[0]
diff --git a/tools/telemetry/telemetry/core/platform/profiler/netlog_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/netlog_profiler.py
index e828a749cf..c3d8ef0594 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/netlog_profiler.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/netlog_profiler.py
@@ -35,9 +35,11 @@ class NetLogProfiler(profiler.Profiler):
# On Android pull the output file to the host.
if self._platform_backend.GetOSName() == 'android':
host_output_file = '%s.json' % self._output_path
- self._browser_backend.adb.Adb().Adb().Pull(output_file, host_output_file)
+ self._browser_backend.adb.device().old_interface.Adb().Pull(
+ output_file, host_output_file)
# Clean the device
- self._browser_backend.adb.Adb().RunShellCommand('rm %s' % output_file)
+ self._browser_backend.adb.device().old_interface.RunShellCommand(
+ 'rm %s' % output_file)
output_file = host_output_file
print 'Net-internals log saved as %s' % output_file
print 'To view, open in chrome://net-internals'
diff --git a/tools/telemetry/telemetry/core/platform/profiler/perf_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/perf_profiler.py
index e1fe00e6aa..49ecc7cca0 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/perf_profiler.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/perf_profiler.py
@@ -32,7 +32,7 @@ class _SingleProcessPerfProfiler(object):
if self._is_android:
perf_binary = android_prebuilt_profiler_helper.GetDevicePath(
'perf')
- cmd_prefix = ['adb', '-s', browser_backend.adb.device(), 'shell',
+ cmd_prefix = ['adb', '-s', browser_backend.adb.device_serial(), 'shell',
perf_binary]
output_file = os.path.join('/sdcard', 'perf_profiles',
os.path.basename(output_file))
@@ -41,8 +41,10 @@ class _SingleProcessPerfProfiler(object):
'mkdir -p ' + os.path.dirname(self._device_output_file))
else:
cmd_prefix = ['perf']
+ # In perf 3.13 --call-graph requires an argument, so use
+ # the -g short-hand which does not.
self._proc = subprocess.Popen(cmd_prefix +
- ['record', '--call-graph',
+ ['record', '-g',
'--pid', str(pid), '--output', output_file],
stdout=self._tmp_output_file, stderr=subprocess.STDOUT)
@@ -54,10 +56,12 @@ class _SingleProcessPerfProfiler(object):
'To collect a full profile rerun with '
'"--extra-browser-args=--single-process"')
if self._is_android:
- adb = self._browser_backend.adb.Adb()
- perf_pids = adb.ExtractPid('perf')
- adb.RunShellCommand('kill -SIGINT ' + ' '.join(perf_pids))
- util.WaitFor(lambda: not adb.ExtractPid('perf'), timeout=2)
+ device = self._browser_backend.adb.device()
+ perf_pids = device.old_interface.ExtractPid('perf')
+ device.old_interface.RunShellCommand(
+ 'kill -SIGINT ' + ' '.join(perf_pids))
+ util.WaitFor(lambda: not device.old_interface.ExtractPid('perf'),
+ timeout=2)
self._proc.send_signal(signal.SIGINT)
exit_code = self._proc.wait()
try:
@@ -98,8 +102,8 @@ Try rerunning this script under sudo or setting
List of arguments to be passed to perf to point it to the created symfs.
"""
assert self._is_android
- adb = self._browser_backend.adb.Adb()
- adb.Adb().Pull(self._device_output_file, self._output_file)
+ device = self._browser_backend.adb.device()
+ device.old_interface.Adb().Pull(self._device_output_file, self._output_file)
symfs_dir = os.path.dirname(self._output_file)
host_app_symfs = os.path.join(symfs_dir, 'data', 'app-lib')
if not os.path.exists(host_app_symfs):
@@ -112,7 +116,7 @@ Try rerunning this script under sudo or setting
# the one in the device, and symlink it in the host to match --symfs.
device_dir = filter(
lambda app_lib: app_lib.startswith(self._browser_backend.package),
- adb.RunShellCommand('ls /data/app-lib'))
+ device.old_interface.RunShellCommand('ls /data/app-lib'))
os.symlink(os.path.abspath(
os.path.join(util.GetChromiumSrcDir(),
os.environ.get('CHROMIUM_OUT_DIR', 'out'),
@@ -142,11 +146,12 @@ Try rerunning this script under sudo or setting
'libz.so',
]
for lib in common_system_libs:
- adb.Adb().Pull('/system/lib/%s' % lib, host_system_symfs)
+ device.old_interface.Adb().Pull('/system/lib/%s' % lib,
+ host_system_symfs)
# Pull a copy of the kernel symbols.
host_kallsyms = os.path.join(symfs_dir, 'kallsyms')
if not os.path.exists(host_kallsyms):
- adb.Adb().Pull('/proc/kallsyms', host_kallsyms)
+ device.old_interface.Adb().Pull('/proc/kallsyms', host_kallsyms)
return ['--kallsyms', host_kallsyms, '--symfs', symfs_dir]
def _GetStdOut(self):
@@ -167,9 +172,9 @@ class PerfProfiler(profiler.Profiler):
self._process_profilers = []
if platform_backend.GetOSName() == 'android':
android_prebuilt_profiler_helper.InstallOnDevice(
- browser_backend.adb.Adb(), 'perf')
+ browser_backend.adb.device(), 'perf')
# Make sure kernel pointers are not hidden.
- browser_backend.adb.Adb().SetProtectedFileContents(
+ browser_backend.adb.device().old_interface.SetProtectedFileContents(
'/proc/sys/kernel/kptr_restrict', '0')
for pid, output_file in process_output_file_map.iteritems():
if 'zygote' in output_file:
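
Besides the device-API migration, the one behavioral change in this file is the perf flag: perf 3.13 made --call-graph take a mandatory argument, so the profiler switches to the equivalent no-argument short form -g. Roughly, the recording command it launches looks like the sketch below (log_fh standing in for the profiler's temporary output file):

import subprocess

def StartPerfRecord(pid, output_file, log_fh):
  # '-g' enables call-graph sampling without the argument that
  # '--call-graph' started requiring in perf 3.13.
  return subprocess.Popen(
      ['perf', 'record', '-g', '--pid', str(pid), '--output', output_file],
      stdout=log_fh, stderr=subprocess.STDOUT)
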
diff --git a/tools/telemetry/telemetry/core/platform/profiler/perf_profiler_unittest.py b/tools/telemetry/telemetry/core/platform/profiler/perf_profiler_unittest.py
index 3f01825501..5b11f251d4 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/perf_profiler_unittest.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/perf_profiler_unittest.py
@@ -19,7 +19,8 @@ class TestPerfProfiler(unittest.TestCase):
profile_file = os.path.join(
util.GetUnittestDataDir(), 'perf_report_output.txt')
- perf_report_output = open(profile_file, 'r').read()
+ with open(profile_file) as f:
+ perf_report_output = f.read()
mock_popen = simple_mock.MockObject()
mock_popen.ExpectCall('communicate').WillReturn([perf_report_output])
diff --git a/tools/telemetry/telemetry/core/platform/profiler/tcmalloc_heap_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/tcmalloc_heap_profiler.py
index 8820d7d528..e04f65580c 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/tcmalloc_heap_profiler.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/tcmalloc_heap_profiler.py
@@ -34,13 +34,13 @@ class _TCMallocHeapProfilerAndroid(object):
def _SetDeviceProperties(self, properties):
device_configured = False
# This profiler requires adb root to set properties.
- self._browser_backend.adb.Adb().EnableAdbRoot()
+ self._browser_backend.adb.device().old_interface.EnableAdbRoot()
for values in properties.itervalues():
device_property = self._browser_backend.adb.system_properties[values[0]]
if not device_property or not device_property.strip():
self._browser_backend.adb.system_properties[values[0]] = values[1]
device_configured = True
- if not self._browser_backend.adb.Adb().FileExistsOnDevice(
+ if not self._browser_backend.adb.device().old_interface.FileExistsOnDevice(
self._DEFAULT_DEVICE_DIR):
self._browser_backend.adb.RunShellCommand(
'mkdir -p ' + self._DEFAULT_DEVICE_DIR)
@@ -51,7 +51,7 @@ class _TCMallocHeapProfilerAndroid(object):
raise Exception('Device required special config, run again.')
def CollectProfile(self):
- self._browser_backend.adb.Adb().Adb().Pull(
+ self._browser_backend.adb.device().old_interface.Adb().Pull(
self._DEFAULT_DEVICE_DIR, self._output_path)
self._browser_backend.adb.RunShellCommand(
'rm ' + os.path.join(self._DEFAULT_DEVICE_DIR, '*'))
diff --git a/tools/telemetry/telemetry/core/platform/profiler/tcpdump_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/tcpdump_profiler.py
index bbcb2090c1..c472502aee 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/tcpdump_profiler.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/tcpdump_profiler.py
@@ -30,7 +30,7 @@ class _TCPDumpProfilerAndroid(object):
self._adb.RunShellCommand('mkdir -p ' +
os.path.dirname(self._DEVICE_DUMP_FILE))
self._proc = subprocess.Popen(
- ['adb', '-s', self._adb.device(),
+ ['adb', '-s', self._adb.device_serial(),
'shell', android_prebuilt_profiler_helper.GetDevicePath('tcpdump')] +
_TCP_DUMP_BASE_OPTS +
[self._DEVICE_DUMP_FILE])
@@ -45,7 +45,8 @@ class _TCPDumpProfilerAndroid(object):
self._proc.terminate()
host_dump = os.path.join(self._output_path,
os.path.basename(self._DEVICE_DUMP_FILE))
- self._adb.Adb().Adb().Pull(self._DEVICE_DUMP_FILE, host_dump)
+ self._adb.device().old_interface.Adb().Pull(self._DEVICE_DUMP_FILE,
+ host_dump)
print 'TCP dump available at: %s ' % host_dump
print 'Use Wireshark to open it.'
return host_dump
@@ -99,7 +100,7 @@ class TCPDumpProfiler(profiler.Profiler):
browser_backend, platform_backend, output_path, state)
if platform_backend.GetOSName() == 'android':
android_prebuilt_profiler_helper.InstallOnDevice(
- browser_backend.adb.Adb(), 'tcpdump')
+ browser_backend.adb.device(), 'tcpdump')
self._platform_profiler = _TCPDumpProfilerAndroid(
browser_backend.adb, output_path)
else:
diff --git a/tools/telemetry/telemetry/core/platform/profiler/v8_profiler.py b/tools/telemetry/telemetry/core/platform/profiler/v8_profiler.py
index d2b9502ad5..a804cfe24f 100644
--- a/tools/telemetry/telemetry/core/platform/profiler/v8_profiler.py
+++ b/tools/telemetry/telemetry/core/platform/profiler/v8_profiler.py
@@ -37,9 +37,11 @@ class V8Profiler(profiler.Profiler):
# On Android pull the output file to the host.
if self._platform_backend.GetOSName() == 'android':
host_output_file = '%s.log' % self._output_path
- self._browser_backend.adb.Adb().Adb().Pull(output_file, host_output_file)
+ self._browser_backend.adb.device().old_interface.Adb().Pull(
+ output_file, host_output_file)
# Clean the device
- self._browser_backend.adb.Adb().RunShellCommand('rm %s' % output_file)
+ self._browser_backend.adb.device().old_interface.RunShellCommand(
+ 'rm %s' % output_file)
output_file = host_output_file
print 'V8 profile saved as %s' % output_file
print 'To view, open in ' \
diff --git a/tools/telemetry/telemetry/core/tab_unittest.py b/tools/telemetry/telemetry/core/tab_unittest.py
index 3b107b9d62..91bdb51f58 100644
--- a/tools/telemetry/telemetry/core/tab_unittest.py
+++ b/tools/telemetry/telemetry/core/tab_unittest.py
@@ -67,6 +67,11 @@ class TabTest(tab_test_case.TabTestCase):
util.WaitFor(lambda: _IsDocumentVisible(self._tab), timeout=5)
self.assertFalse(_IsDocumentVisible(new_tab))
+ def testTabUrl(self):
+ self.assertEquals(self._tab.url, 'about:blank')
+ self.Navigate('blank.html')
+ self.assertEquals(self._tab.url, self.test_url)
+
def testIsTimelineRecordingRunningTab(self):
self.assertFalse(self._tab.is_timeline_recording_running)
self._tab.StartTimelineRecording()
@@ -95,7 +100,7 @@ class GpuTabTest(tab_test_case.TabTestCase):
super(GpuTabTest, self).setUp()
# Test flaky on mac: http://crbug.com/358664
- @test.Disabled('mac')
+ @test.Disabled('android', 'mac')
def testScreenshot(self):
if not self._tab.screenshot_supported:
logging.warning('Browser does not support screenshots, skipping test.')
diff --git a/tools/telemetry/telemetry/core/webpagereplay.py b/tools/telemetry/telemetry/core/webpagereplay.py
index 92db738736..71e1f35989 100644
--- a/tools/telemetry/telemetry/core/webpagereplay.py
+++ b/tools/telemetry/telemetry/core/webpagereplay.py
@@ -126,7 +126,6 @@ class ReplayServer(object):
self._CheckPath('archive file', self.archive_path)
self._CheckPath('replay script', self.replay_py)
- self.log_fh = None
self.replay_process = None
def _AddDefaultReplayOptions(self):
@@ -165,15 +164,16 @@ class ReplayServer(object):
# Read the ports from the WPR log.
if not self.http_port or not self.https_port or not self.dns_port:
- for line in open(self.log_path).readlines():
- m = port_re.match(line.strip())
- if m:
- if not self.http_port and m.group('protocol') == 'HTTP':
- self.http_port = int(m.group('port'))
- elif not self.https_port and m.group('protocol') == 'HTTPS':
- self.https_port = int(m.group('port'))
- elif not self.dns_port and m.group('protocol') == 'DNS':
- self.dns_port = int(m.group('port'))
+ with open(self.log_path) as f:
+ for line in f.readlines():
+ m = port_re.match(line.strip())
+ if m:
+ if not self.http_port and m.group('protocol') == 'HTTP':
+ self.http_port = int(m.group('port'))
+ elif not self.https_port and m.group('protocol') == 'HTTPS':
+ self.https_port = int(m.group('port'))
+ elif not self.dns_port and m.group('protocol') == 'DNS':
+ self.dns_port = int(m.group('port'))
# Try to connect to the WPR ports.
if self.http_port and self.https_port:
@@ -202,14 +202,17 @@ class ReplayServer(object):
cmd_line = [sys.executable, self.replay_py]
cmd_line.extend(self.replay_options)
cmd_line.append(self.archive_path)
- self.log_fh = self._OpenLogFile()
+
logging.debug('Starting Web-Page-Replay: %s', cmd_line)
- kwargs = {'stdout': self.log_fh, 'stderr': subprocess.STDOUT}
- if sys.platform.startswith('linux') or sys.platform == 'darwin':
- kwargs['preexec_fn'] = ResetInterruptHandler
- self.replay_process = subprocess.Popen(cmd_line, **kwargs)
+ with self._OpenLogFile() as log_fh:
+ kwargs = {'stdout': log_fh, 'stderr': subprocess.STDOUT}
+ if sys.platform.startswith('linux') or sys.platform == 'darwin':
+ kwargs['preexec_fn'] = ResetInterruptHandler
+ self.replay_process = subprocess.Popen(cmd_line, **kwargs)
+
if not self.WaitForStart(30):
- log = open(self.log_path).read()
+ with open(self.log_path) as f:
+ log = f.read()
raise ReplayNotStartedError(
'Web Page Replay failed to start. Log output:\n%s' % log)
@@ -242,8 +245,6 @@ class ReplayServer(object):
except: # pylint: disable=W0702
pass
self.replay_process.wait()
- if self.log_fh:
- self.log_fh.close()
def __enter__(self):
"""Add support for with-statement."""
diff --git a/tools/telemetry/telemetry/page/actions/gesture_action.py b/tools/telemetry/telemetry/page/actions/gesture_action.py
index 09a41fcabb..222125cc7a 100644
--- a/tools/telemetry/telemetry/page/actions/gesture_action.py
+++ b/tools/telemetry/telemetry/page/actions/gesture_action.py
@@ -21,7 +21,10 @@ class GestureAction(page_action.PageAction):
def RunAction(self, page, tab):
runner = action_runner.ActionRunner(None, tab)
- interaction_name = 'Gesture_%s' % self.__class__.__name__
+ if self.wait_action:
+ interaction_name = 'Action_%s' % self.__class__.__name__
+ else:
+ interaction_name = 'Gesture_%s' % self.__class__.__name__
runner.BeginInteraction(interaction_name, [tir_module.IS_SMOOTH])
self.RunGesture(page, tab)
if self.wait_action:
diff --git a/tools/telemetry/telemetry/page/actions/gesture_action_unittest.py b/tools/telemetry/telemetry/page/actions/gesture_action_unittest.py
index 9c6f909d73..637fad7f6c 100644
--- a/tools/telemetry/telemetry/page/actions/gesture_action_unittest.py
+++ b/tools/telemetry/telemetry/page/actions/gesture_action_unittest.py
@@ -2,35 +2,43 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import time
-
from telemetry.page.actions import gesture_action
+from telemetry.page.actions import wait
from telemetry.unittest import tab_test_case
+from telemetry.unittest import simple_mock
class MockGestureAction(gesture_action.GestureAction):
"""Mock gesture action that simply sleeps for a specified amount of time."""
- def __init__(self, attributes=None):
+ def __init__(self, sleep_func, attributes=None):
+ self.sleep_func = sleep_func
super(MockGestureAction, self).__init__(attributes)
def RunGesture(self, page, tab):
duration = getattr(self, 'duration', 2)
- time.sleep(duration)
+ self.sleep_func(duration)
class GestureActionTest(tab_test_case.TabTestCase):
def testGestureAction(self):
"""Test that GestureAction.RunAction() calls RunGesture()."""
- action = MockGestureAction({ 'duration': 1 })
+ mock_timer = simple_mock.MockTimer()
+ action = MockGestureAction(mock_timer.Sleep, { 'duration': 1 })
- start_time = time.time()
action.RunAction(None, self._tab)
- self.assertGreaterEqual(time.time() - start_time, 1.0)
+ self.assertEqual(mock_timer.GetTime(), 1)
def testWaitAfter(self):
- action = MockGestureAction({ 'duration': 1,
- 'wait_after': { 'seconds': 1 } })
-
- start_time = time.time()
- action.RunAction(None, self._tab)
- self.assertGreaterEqual(time.time() - start_time, 2.0)
+ mock_timer = simple_mock.MockTimer()
+ real_time_sleep = wait.time.sleep
+ wait.time.sleep = mock_timer.Sleep
+
+ try:
+ action = MockGestureAction(mock_timer.Sleep,
+ { 'duration': 1,
+ 'wait_after': { 'seconds': 1 } })
+
+ action.RunAction(None, self._tab)
+ self.assertEqual(mock_timer.GetTime(), 2)
+ finally:
+ wait.time.sleep = real_time_sleep
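
Both rewritten tests above follow the same recipe: swap the sleep function the module under test uses (wait.time.sleep) for simple_mock.MockTimer's Sleep, assert against MockTimer.GetTime() instead of wall-clock time, and restore the real function in a finally block so later tests are unaffected. A self-contained sketch of that recipe using a hand-rolled fake timer rather than telemetry's MockTimer:

import time

class FakeTimer(object):
  """Advances a virtual clock instead of actually sleeping."""
  def __init__(self):
    self._now = 0
  def Sleep(self, seconds):
    self._now += seconds
  def GetTime(self):
    return self._now

timer = FakeTimer()
real_sleep = time.sleep
time.sleep = timer.Sleep            # patch the module attribute
try:
  time.sleep(1)                     # returns instantly, advances the fake clock
  time.sleep(1)
  assert timer.GetTime() == 2
finally:
  time.sleep = real_sleep           # always restore, even if the assert fails
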
diff --git a/tools/telemetry/telemetry/page/actions/loop_unittest.py b/tools/telemetry/telemetry/page/actions/loop_unittest.py
index 6d256a15f9..b547961de3 100644
--- a/tools/telemetry/telemetry/page/actions/loop_unittest.py
+++ b/tools/telemetry/telemetry/page/actions/loop_unittest.py
@@ -2,10 +2,12 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from telemetry import decorators
from telemetry.core import util
from telemetry.page.actions import loop
from telemetry.unittest import tab_test_case
+
AUDIO_1_LOOP_CHECK = 'window.__hasEventCompleted("#audio_1", "loop");'
VIDEO_1_LOOP_CHECK = 'window.__hasEventCompleted("#video_1", "loop");'
@@ -16,6 +18,7 @@ class LoopActionTest(tab_test_case.TabTestCase):
tab_test_case.TabTestCase.setUp(self)
self.Navigate('video_test.html')
+ @decorators.Disabled('android')
def testLoopWithNoSelector(self):
"""Tests that with no selector Loop action loops first media element."""
data = {'selector': '#video_1', 'loop_count': 2}
@@ -26,6 +29,7 @@ class LoopActionTest(tab_test_case.TabTestCase):
self.assertTrue(self._tab.EvaluateJavaScript(VIDEO_1_LOOP_CHECK))
self.assertFalse(self._tab.EvaluateJavaScript(AUDIO_1_LOOP_CHECK))
+ @decorators.Disabled('android')
def testLoopWithAllSelector(self):
"""Tests that Loop action loops all video elements with selector='all'."""
data = {'selector': 'all', 'loop_count': 2}
@@ -39,6 +43,7 @@ class LoopActionTest(tab_test_case.TabTestCase):
self.assertTrue(self._tab.EvaluateJavaScript(VIDEO_1_LOOP_CHECK))
self.assertTrue(self._tab.EvaluateJavaScript(AUDIO_1_LOOP_CHECK))
+ @decorators.Disabled('android')
def testLoopWaitForLoopTimeout(self):
"""Tests that wait_for_loop timeouts if video does not loop."""
data = {'selector': '#video_1',
@@ -49,6 +54,7 @@ class LoopActionTest(tab_test_case.TabTestCase):
self.assertFalse(self._tab.EvaluateJavaScript(VIDEO_1_LOOP_CHECK))
self.assertRaises(util.TimeoutException, action.RunAction, None, self._tab)
+ @decorators.Disabled('android')
def testLoopWithoutLoopCount(self):
"""Tests that loop action fails with no loop count."""
data = {}
diff --git a/tools/telemetry/telemetry/page/actions/play_unittest.py b/tools/telemetry/telemetry/page/actions/play_unittest.py
index 6f4f747850..6989c68b86 100644
--- a/tools/telemetry/telemetry/page/actions/play_unittest.py
+++ b/tools/telemetry/telemetry/page/actions/play_unittest.py
@@ -2,10 +2,12 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from telemetry import decorators
from telemetry.core import util
from telemetry.page.actions import play
from telemetry.unittest import tab_test_case
+
AUDIO_1_PLAYING_CHECK = 'window.__hasEventCompleted("#audio_1", "playing");'
VIDEO_1_PLAYING_CHECK = 'window.__hasEventCompleted("#video_1", "playing");'
VIDEO_1_ENDED_CHECK = 'window.__hasEventCompleted("#video_1", "ended");'
@@ -17,6 +19,7 @@ class PlayActionTest(tab_test_case.TabTestCase):
tab_test_case.TabTestCase.setUp(self)
self.Navigate('video_test.html')
+ @decorators.Disabled('android')
def testPlayWithNoSelector(self):
"""Tests that with no selector Play action plays first video element."""
data = {'wait_for_playing': True}
@@ -30,6 +33,7 @@ class PlayActionTest(tab_test_case.TabTestCase):
self.assertTrue(self._tab.EvaluateJavaScript(VIDEO_1_PLAYING_CHECK))
self.assertFalse(self._tab.EvaluateJavaScript(AUDIO_1_PLAYING_CHECK))
+ @decorators.Disabled('android')
def testPlayWithVideoSelector(self):
"""Tests that Play action plays video element matching selector."""
data = {'selector': '#video_1', 'wait_for_playing': True}
@@ -43,6 +47,7 @@ class PlayActionTest(tab_test_case.TabTestCase):
self.assertTrue(self._tab.EvaluateJavaScript(VIDEO_1_PLAYING_CHECK))
self.assertFalse(self._tab.EvaluateJavaScript(AUDIO_1_PLAYING_CHECK))
+ @decorators.Disabled('android')
def testPlayWithAllSelector(self):
"""Tests that Play action plays all video elements with selector='all'."""
data = {'selector': 'all', 'wait_for_playing': True}
@@ -68,6 +73,7 @@ class PlayActionTest(tab_test_case.TabTestCase):
self.assertFalse(self._tab.EvaluateJavaScript(VIDEO_1_PLAYING_CHECK))
self.assertRaises(util.TimeoutException, action.RunAction, None, self._tab)
+ @decorators.Disabled('android')
def testPlayWaitForEnded(self):
"""Tests that wait_for_ended waits for video to end."""
data = {'selector': '#video_1', 'wait_for_ended': True}
diff --git a/tools/telemetry/telemetry/page/actions/wait_unittest.py b/tools/telemetry/telemetry/page/actions/wait_unittest.py
index 5355f380f0..b2214a5fa4 100644
--- a/tools/telemetry/telemetry/page/actions/wait_unittest.py
+++ b/tools/telemetry/telemetry/page/actions/wait_unittest.py
@@ -2,11 +2,10 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import time
-
from telemetry.core import util
from telemetry.page.actions import wait
from telemetry.unittest import tab_test_case
+from telemetry.unittest import simple_mock
class WaitActionTest(tab_test_case.TabTestCase):
@@ -16,21 +15,40 @@ class WaitActionTest(tab_test_case.TabTestCase):
self._tab.EvaluateJavaScript('document.location.pathname;'),
'/blank.html')
- i = wait.WaitAction({ 'condition': 'duration', 'seconds': 1 })
+ mock_timer = simple_mock.MockTimer()
+ real_time_sleep = wait.time.sleep
+ wait.time.sleep = mock_timer.Sleep
+
+ try:
+ i = wait.WaitAction({ 'condition': 'duration', 'seconds': 1 })
- start_time = time.time()
- i.RunAction(None, self._tab)
- self.assertTrue(time.time() - start_time >= 1.0)
+ i.RunAction(None, self._tab)
+ self.assertEqual(mock_timer.GetTime(), 1)
+ finally:
+ wait.time.sleep = real_time_sleep
def testWaitActionTimeout(self):
- wait_action = wait.WaitAction({
- 'condition': 'javascript',
- 'javascript': '1 + 1 === 3',
- 'timeout': 1
- })
-
- start_time = time.time()
- self.assertRaises(
- util.TimeoutException,
- lambda: wait_action.RunAction(None, self._tab))
- self.assertTrue(time.time() - start_time < 5)
+ mock_timer = simple_mock.MockTimer()
+ real_wait_time_sleep = wait.time.sleep
+ real_util_time_sleep = util.time.sleep
+ real_util_time_time = util.time.time
+
+ wait.time.sleep = mock_timer.Sleep
+ util.time.sleep = mock_timer.Sleep
+ util.time.time = mock_timer.GetTime
+
+ try:
+ wait_action = wait.WaitAction({
+ 'condition': 'javascript',
+ 'javascript': '1 + 1 === 3',
+ 'timeout': 1
+ })
+
+ self.assertRaises(
+ util.TimeoutException,
+ lambda: wait_action.RunAction(None, self._tab))
+ self.assertLess(mock_timer.GetTime(), 5)
+ finally:
+ wait.time.sleep = real_wait_time_sleep
+ util.time.sleep = real_util_time_sleep
+ util.time.time = real_util_time_time
diff --git a/tools/telemetry/telemetry/page/block_page_measurement_results_unittest.py b/tools/telemetry/telemetry/page/block_page_measurement_results_unittest.py
index a0355fc9d0..234aa01596 100644
--- a/tools/telemetry/telemetry/page/block_page_measurement_results_unittest.py
+++ b/tools/telemetry/telemetry/page/block_page_measurement_results_unittest.py
@@ -12,14 +12,11 @@ BlockPageMeasurementResults = \
block_page_measurement_results.BlockPageMeasurementResults
def _MakePageSet():
- return page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.foo.com/"},
- {"url": "http://www.bar.com/"}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://www.foo.com/')
+ ps.AddPageWithDefaultRunNavigate('http://www.bar.com/')
+ return ps
+
class NonPrintingBlockPageMeasurementResults(BlockPageMeasurementResults):
def __init__(self, *args):
diff --git a/tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py b/tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py
index ba7d8994ad..6b46619204 100644
--- a/tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py
+++ b/tools/telemetry/telemetry/page/buildbot_page_measurement_results_unittest.py
@@ -11,15 +11,11 @@ from telemetry.value import list_of_scalar_values
from telemetry.value import scalar
def _MakePageSet():
- return page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.foo.com/"},
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://www.foo.com/')
+ ps.AddPageWithDefaultRunNavigate('http://www.bar.com/')
+ ps.AddPageWithDefaultRunNavigate('http://www.baz.com/')
+ return ps
class SummarySavingPageMeasurementResults(
buildbot_page_measurement_results.BuildbotPageMeasurementResults):
diff --git a/tools/telemetry/telemetry/page/csv_page_measurement_results_unittest.py b/tools/telemetry/telemetry/page/csv_page_measurement_results_unittest.py
index bedbf646bf..3891e57965 100644
--- a/tools/telemetry/telemetry/page/csv_page_measurement_results_unittest.py
+++ b/tools/telemetry/telemetry/page/csv_page_measurement_results_unittest.py
@@ -10,13 +10,10 @@ from telemetry.page import csv_page_measurement_results
from telemetry.page import page_set
def _MakePageSet():
- return page_set.PageSet.FromDict({
- "description": "hello",
- "pages": [
- {"url": "http://www.foo.com/"},
- {"url": "http://www.bar.com/"}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://www.foo.com/')
+ ps.AddPageWithDefaultRunNavigate('http://www.bar.com/')
+ return ps
class NonPrintingCsvPageMeasurementResults(
csv_page_measurement_results.CsvPageMeasurementResults):
diff --git a/tools/telemetry/telemetry/page/html_page_measurement_results.py b/tools/telemetry/telemetry/page/html_page_measurement_results.py
index c3f76c0044..cb2fc4110d 100644
--- a/tools/telemetry/telemetry/page/html_page_measurement_results.py
+++ b/tools/telemetry/telemetry/page/html_page_measurement_results.py
@@ -54,16 +54,19 @@ class HtmlPageMeasurementResults(
return lastchange.FetchVersionInfo(None).revision
def _GetHtmlTemplate(self):
- return open(_TEMPLATE_HTML_PATH, 'r').read()
+ with open(_TEMPLATE_HTML_PATH) as f:
+ return f.read()
def _GetPlugins(self):
plugins = ''
for p in _PLUGINS:
- plugins += open(os.path.join(util.GetChromiumSrcDir(), *p), 'r').read()
+ with open(os.path.join(util.GetChromiumSrcDir(), *p)) as f:
+ plugins += f.read()
return plugins
def _GetUnitJson(self):
- return open(os.path.join(util.GetChromiumSrcDir(), *_UNIT_JSON), 'r').read()
+ with open(os.path.join(util.GetChromiumSrcDir(), *_UNIT_JSON)) as f:
+ return f.read()
def _ReadExistingResults(self, output_stream):
results_html = output_stream.read()
diff --git a/tools/telemetry/telemetry/page/html_page_measurement_results_unittest.py b/tools/telemetry/telemetry/page/html_page_measurement_results_unittest.py
index e94139cf84..25001db312 100644
--- a/tools/telemetry/telemetry/page/html_page_measurement_results_unittest.py
+++ b/tools/telemetry/telemetry/page/html_page_measurement_results_unittest.py
@@ -10,15 +10,11 @@ from telemetry.page import page_set
def _MakePageSet():
- return page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.foo.com/"},
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://www.foo.com/')
+ ps.AddPageWithDefaultRunNavigate('http://www.bar.com/')
+ ps.AddPageWithDefaultRunNavigate('http://www.baz.com/')
+ return ps
class DeterministicHtmlPageMeasurementResults(
diff --git a/tools/telemetry/telemetry/page/page.py b/tools/telemetry/telemetry/page/page.py
index c76b6d290a..344175abea 100644
--- a/tools/telemetry/telemetry/page/page.py
+++ b/tools/telemetry/telemetry/page/page.py
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import inspect
import os
import re
import urlparse
@@ -13,6 +14,10 @@ class Page(object):
def __init__(self, url, page_set=None, base_dir=None):
self.url = url
self._page_set = page_set
+ # The default value of base_dir is the directory of the file that defines
+ # the class of this page instance.
+ if base_dir is None:
+ base_dir = os.path.dirname(inspect.getfile(self.__class__))
self._base_dir = base_dir
# These attributes can be set dynamically by the page.
@@ -38,9 +43,9 @@ class Page(object):
# share property through a common ancestor class.
# TODO(nednguyen): remove this when crbug.com/239179 is marked fixed
def __getattr__(self, name):
- # Disable this property on python page_set
+ # Only forward this lookup when the page_set is dict-based
if (self.page_set and hasattr(self.page_set, name) and
- not self.page_set.file_path.endswith('.py')):
+ self.page_set.IsDictBasedPageSet()):
return getattr(self.page_set, name)
raise AttributeError(
'%r object has no attribute %r' % (self.__class__, name))
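
The new default above resolves base_dir from the file that defines the page's class via inspect.getfile. A minimal sketch of that lookup outside Telemetry (the Base class name is illustrative, not part of the API):

    import inspect
    import os

    class Base(object):
        def __init__(self, base_dir=None):
            if base_dir is None:
                # Fall back to the directory of the module defining type(self);
                # for a subclass this is the subclass's own file.
                base_dir = os.path.dirname(inspect.getfile(self.__class__))
            self.base_dir = base_dir

    print(Base().base_dir)  # directory of the file containing this script
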
diff --git a/tools/telemetry/telemetry/page/page_measurement_results_unittest.py b/tools/telemetry/telemetry/page/page_measurement_results_unittest.py
index 381788043f..b5b35dfdd8 100644
--- a/tools/telemetry/telemetry/page/page_measurement_results_unittest.py
+++ b/tools/telemetry/telemetry/page/page_measurement_results_unittest.py
@@ -10,15 +10,11 @@ from telemetry.page import perf_tests_helper
from telemetry.value import scalar
def _MakePageSet():
- return page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"},
- {"url": "http://www.foo.com/"}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ ps.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ ps.AddPageWithDefaultRunNavigate("http://www.foo.com/")
+ return ps
class NonPrintingPageMeasurementResults(
page_measurement_results.PageMeasurementResults):
diff --git a/tools/telemetry/telemetry/page/page_measurement_unittest.py b/tools/telemetry/telemetry/page/page_measurement_unittest.py
index 987c900e83..3754fb30a7 100644
--- a/tools/telemetry/telemetry/page/page_measurement_unittest.py
+++ b/tools/telemetry/telemetry/page/page_measurement_unittest.py
@@ -76,7 +76,7 @@ class PageMeasurementUnitTest(
self.assertEquals(0, len(all_results.failures))
def testGotQueryParams(self):
- ps = self.CreatePageSet('file://blank.html?foo=1')
+ ps = self.CreatePageSetFromFileInUnittestDataDir('blank.html?foo=1')
measurement = MeasurementQueryParams()
all_results = self.RunMeasurement(measurement, ps, options=self._options)
self.assertEquals(0, len(all_results.failures))
diff --git a/tools/telemetry/telemetry/page/page_measurement_unittest_base.py b/tools/telemetry/telemetry/page/page_measurement_unittest_base.py
index 2ef2f71cd3..412cd4e192 100644
--- a/tools/telemetry/telemetry/page/page_measurement_unittest_base.py
+++ b/tools/telemetry/telemetry/page/page_measurement_unittest_base.py
@@ -7,26 +7,34 @@ import unittest
from telemetry.core import util
from telemetry.page import page_runner
from telemetry.page import page as page_module
-from telemetry.page import page_set
+from telemetry.page import page_set as page_set_module
from telemetry.page import page_test
from telemetry.page import test_expectations
+# pylint: disable=W0401,W0614
+from telemetry.page.actions.all_page_actions import *
from telemetry.unittest import options_for_unittests
+
+class BasicTestPage(page_module.PageWithDefaultRunNavigate):
+ def __init__(self, url, page_set, base_dir):
+ super(BasicTestPage, self).__init__(url, page_set, base_dir)
+
+ def RunSmoothness(self, action_runner):
+ action_runner.RunAction(ScrollAction())
+
class PageMeasurementUnitTestBase(unittest.TestCase):
"""unittest.TestCase-derived class to help in the construction of unit tests
for a measurement."""
def CreatePageSetFromFileInUnittestDataDir(self, test_filename):
- return self.CreatePageSet('file://' + test_filename)
+ ps = self.CreateEmptyPageSet()
+ page = BasicTestPage('file://' + test_filename, ps, base_dir=ps.base_dir)
+ ps.AddPage(page)
+ return ps
- def CreatePageSet(self, test_filename):
+ def CreateEmptyPageSet(self):
base_dir = util.GetUnittestDataDir()
- ps = page_set.PageSet(file_path=base_dir)
- page = page_module.Page(test_filename, ps, base_dir=base_dir)
- setattr(page, 'RunSmoothness', {'action': 'scroll'})
- setattr(page, 'RunRepaint',
- { "action": "repaint_continuously", "seconds": 2 })
- ps.pages.append(page)
+ ps = page_set_module.PageSet(file_path=base_dir)
return ps
def RunMeasurement(self, measurement, ps,
diff --git a/tools/telemetry/telemetry/page/page_runner.py b/tools/telemetry/telemetry/page/page_runner.py
index 21a4ce19ec..76927f0ffe 100644
--- a/tools/telemetry/telemetry/page/page_runner.py
+++ b/tools/telemetry/telemetry/page/page_runner.py
@@ -246,7 +246,9 @@ def _LogStackTrace(title, browser):
def _PrepareAndRunPage(test, page_set, expectations, finder_options,
browser_options, page, credentials_path,
possible_browser, results, state):
- if browser_options.wpr_mode != wpr_modes.WPR_RECORD:
+ if finder_options.use_live_sites:
+ possible_browser.finder_options.browser_options.wpr_mode = wpr_modes.WPR_OFF
+ elif browser_options.wpr_mode != wpr_modes.WPR_RECORD:
possible_browser.finder_options.browser_options.wpr_mode = (
wpr_modes.WPR_REPLAY
if page.archive_path and os.path.isfile(page.archive_path)
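
The branch added above forces Web Page Replay off when --use-live-sites is passed; otherwise the existing record/replay selection still applies. A condensed, hypothetical restatement of that decision (the helper and constants are illustrative, not the page_runner API):

    WPR_OFF, WPR_RECORD, WPR_REPLAY = 'wpr-off', 'wpr-record', 'wpr-replay'

    def choose_wpr_mode(use_live_sites, current_mode, archive_exists):
        if use_live_sites:
            return WPR_OFF        # --use-live-sites: hit the real network
        if current_mode == WPR_RECORD:
            return WPR_RECORD     # recording requested: leave the mode alone
        return WPR_REPLAY if archive_exists else WPR_OFF

    assert choose_wpr_mode(True, WPR_RECORD, True) == WPR_OFF
    assert choose_wpr_mode(False, WPR_RECORD, False) == WPR_RECORD
    assert choose_wpr_mode(False, WPR_OFF, True) == WPR_REPLAY
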
diff --git a/tools/telemetry/telemetry/page/page_runner_unittest.py b/tools/telemetry/telemetry/page/page_runner_unittest.py
index 53740c4e7d..37d5f57ca8 100644
--- a/tools/telemetry/telemetry/page/page_runner_unittest.py
+++ b/tools/telemetry/telemetry/page/page_runner_unittest.py
@@ -7,6 +7,7 @@ import os
import tempfile
import unittest
+from telemetry import decorators
from telemetry.core import browser_finder
from telemetry.core import exceptions
from telemetry.core import user_agent
@@ -62,7 +63,7 @@ class PageRunnerTests(unittest.TestCase):
def testHandlingOfCrashedTab(self):
ps = page_set.PageSet()
expectations = test_expectations.TestExpectations()
- page1 = page_module.Page('chrome://crash', ps)
+ page1 = page_module.PageWithDefaultRunNavigate('chrome://crash', ps)
ps.pages.append(page1)
class Test(page_test.PageTest):
@@ -200,6 +201,7 @@ class PageRunnerTests(unittest.TestCase):
self.assertEquals(0, len(results.successes))
self.assertEquals(0, len(results.failures))
+ @decorators.Disabled('win')
def testPagesetRepeat(self):
ps = page_set.PageSet()
expectations = test_expectations.TestExpectations()
@@ -230,7 +232,8 @@ class PageRunnerTests(unittest.TestCase):
results.PrintSummary()
self.assertEquals(4, len(results.successes))
self.assertEquals(0, len(results.failures))
- stdout = open(output_file).read()
+ with open(output_file) as f:
+ stdout = f.read()
self.assertIn('RESULT metric_by_url: blank.html= [1,3] unit', stdout)
self.assertIn('RESULT metric_by_url: green_rect.html= [2,4] unit', stdout)
self.assertIn('*RESULT metric: metric= [1,2,3,4] unit', stdout)
@@ -446,3 +449,40 @@ class PageRunnerTests(unittest.TestCase):
SetUpPageRunnerArguments(options)
page_runner.Run(test, ps, expectations, options)
assert test.did_call_clean_up
+
+ def TestUseLiveSitesFlag(self, options, expected_is_page_from_archive):
+ ps = page_set.PageSet(
+ file_path=util.GetUnittestDataDir(),
+ archive_data_file='data/archive_blank.json')
+ ps.pages.append(page_module.Page(
+ 'file://blank.html', ps, base_dir=ps.base_dir))
+ expectations = test_expectations.TestExpectations()
+
+ class ArchiveTest(page_measurement.PageMeasurement):
+ def __init__(self):
+ super(ArchiveTest, self).__init__()
+ self.is_page_from_archive = False
+
+ def WillNavigateToPage(self, page, tab):
+ self.is_page_from_archive = (
+ tab.browser._wpr_server is not None) # pylint: disable=W0212
+
+ def MeasurePage(self, _, __, results):
+ pass
+
+ test = ArchiveTest()
+ page_runner.Run(test, ps, expectations, options)
+ self.assertEquals(expected_is_page_from_archive, test.is_page_from_archive)
+
+ def testUseLiveSitesFlagSet(self):
+ options = options_for_unittests.GetCopy()
+ options.output_format = 'none'
+ options.use_live_sites = True
+ SetUpPageRunnerArguments(options)
+ self.TestUseLiveSitesFlag(options, expected_is_page_from_archive=False)
+
+ def testUseLiveSitesFlagUnset(self):
+ options = options_for_unittests.GetCopy()
+ options.output_format = 'none'
+ SetUpPageRunnerArguments(options)
+ self.TestUseLiveSitesFlag(options, expected_is_page_from_archive=True)
diff --git a/tools/telemetry/telemetry/page/page_set.py b/tools/telemetry/telemetry/page/page_set.py
index b746ca455e..9471653328 100644
--- a/tools/telemetry/telemetry/page/page_set.py
+++ b/tools/telemetry/telemetry/page/page_set.py
@@ -5,7 +5,6 @@
import csv
import inspect
import os
-import sys
from telemetry.core import util
from telemetry.page import page as page_module
@@ -30,10 +29,18 @@ class PageSetError(Exception):
class PageSet(object):
- def __init__(self, file_path='', description='', archive_data_file='',
+ def __init__(self, file_path=None, description='', archive_data_file='',
credentials_path=None, user_agent_type=None,
make_javascript_deterministic=True, startup_url='',
serving_dirs=None):
+ # The default value of file_path is the location of the file that defines
+ # this page set instance's class.
+ if file_path is None:
+ file_path = inspect.getfile(self.__class__)
+ # Turn a .pyc file into the corresponding .py file if we can
+ if file_path.endswith('.pyc') and os.path.exists(file_path[:-1]):
+ file_path = file_path[:-1]
+
self.file_path = file_path
# These attributes can be set dynamically by the page set.
self.description = description
@@ -44,12 +51,23 @@ class PageSet(object):
self._wpr_archive_info = None
self.startup_url = startup_url
self.pages = []
- if serving_dirs:
- self.serving_dirs = serving_dirs
- else:
- self.serving_dirs = set()
+ self.serving_dirs = set()
+ serving_dirs = [] if serving_dirs is None else serving_dirs
+ # Makes sure that page_set's serving_dirs are absolute paths
+ for sd in serving_dirs:
+ if os.path.isabs(sd):
+ self.serving_dirs.add(os.path.realpath(sd))
+ else:
+ self.serving_dirs.add(os.path.realpath(os.path.join(self.base_dir, sd)))
+ self._is_dict_based_page_set = False
+
+
+ # TODO(nednguyen): Remove this when crbug.com/239179 is marked fixed
+ def IsDictBasedPageSet(self):
+ return self._is_dict_based_page_set
def _InitializeFromDict(self, attributes):
+ self._is_dict_based_page_set = True
if attributes:
for k, v in attributes.iteritems():
if k in LEGACY_NAME_CONVERSION_DICT:
@@ -121,15 +139,6 @@ class PageSet(object):
raise PageSetError("Pageset file needs to contain exactly 1 pageset class"
" with prefix 'PageSet'")
page_set = page_set_classes[0]()
- page_set.file_path = file_path
- # Makes sure that page_set's serving_dirs are absolute paths
- if page_set.serving_dirs:
- abs_serving_dirs = set()
- for serving_dir in page_set.serving_dirs:
- abs_serving_dirs.add(os.path.realpath(os.path.join(
- page_set.base_dir, # pylint: disable=W0212
- serving_dir)))
- page_set.serving_dirs = abs_serving_dirs
for page in page_set.pages:
page_class = page.__class__
@@ -141,10 +150,6 @@ class PageSet(object):
raise PageSetError("""Definition of Run<...> method of all
pages in %s must be in the form of def Run<...>(self, action_runner):"""
% file_path)
- # Set page's base_dir attribute.
- page_file_path = sys.modules[page_class.__module__].__file__
- page._base_dir = os.path.dirname(page_file_path)
-
return page_set
@staticmethod
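
Two behaviours move into the PageSet constructor above: the default file_path now comes from inspect.getfile on the instance's class (with a stale .pyc normalised back to .py), and every serving dir is resolved to an absolute, realpath'd path relative to base_dir. A standalone sketch of just the path handling, with made-up helper names rather than the Telemetry API:

    import inspect
    import os

    def default_file_path(obj):
        # The defining file of obj's class, preferring .py over a stale .pyc.
        file_path = inspect.getfile(obj.__class__)
        if file_path.endswith('.pyc') and os.path.exists(file_path[:-1]):
            file_path = file_path[:-1]
        return file_path

    def absolute_serving_dirs(base_dir, serving_dirs):
        # Resolve each serving dir against base_dir and realpath it.
        dirs = set()
        for sd in (serving_dirs or []):
            if os.path.isabs(sd):
                dirs.add(os.path.realpath(sd))
            else:
                dirs.add(os.path.realpath(os.path.join(base_dir, sd)))
        return dirs

    # A relative and an absolute spelling of the same directory collapse to
    # one entry, matching testAbsoluteServingDir below.
    print(absolute_serving_dirs('/tmp/pages', ['a/b', '/tmp/pages/a/b']))
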
diff --git a/tools/telemetry/telemetry/page/page_set_unittest.py b/tools/telemetry/telemetry/page/page_set_unittest.py
index c5e9e46bd5..cc15d7dce4 100644
--- a/tools/telemetry/telemetry/page/page_set_unittest.py
+++ b/tools/telemetry/telemetry/page/page_set_unittest.py
@@ -11,17 +11,14 @@ from telemetry.page import page_set
class TestPageSet(unittest.TestCase):
+
def testServingDirs(self):
directory_path = tempfile.mkdtemp()
try:
- ps = page_set.PageSet.FromDict({
- 'serving_dirs': ['a/b'],
- 'pages': [
- {'url': 'file://c/test.html'},
- {'url': 'file://c/test.js'},
- {'url': 'file://d/e/../test.html'},
- ]
- }, directory_path)
+ ps = page_set.PageSet(serving_dirs=['a/b'], file_path=directory_path)
+ ps.AddPageWithDefaultRunNavigate('file://c/test.html')
+ ps.AddPageWithDefaultRunNavigate('file://c/test.js')
+ ps.AddPageWithDefaultRunNavigate('file://d/e/../test.html')
finally:
os.rmdir(directory_path)
@@ -31,6 +28,19 @@ class TestPageSet(unittest.TestCase):
self.assertEquals(ps[0].serving_dir, os.path.join(real_directory_path, 'c'))
self.assertEquals(ps[2].serving_dir, os.path.join(real_directory_path, 'd'))
+ def testAbsoluteServingDir(self):
+ directory_path = tempfile.mkdtemp()
+ try:
+ absolute_dir = os.path.join(directory_path, 'a', 'b')
+ ps = page_set.PageSet(file_path=directory_path,
+ serving_dirs=['', directory_path, absolute_dir])
+ real_directory_path = os.path.realpath(directory_path)
+ real_absolute_dir = os.path.realpath(absolute_dir)
+ self.assertEquals(ps.serving_dirs, set([real_directory_path,
+ real_absolute_dir]))
+ finally:
+ os.rmdir(directory_path)
+
def testRenamingCompoundActions(self):
ps = page_set.PageSet.FromDict({
'serving_dirs': ['a/b'],
@@ -68,7 +78,6 @@ class TestPageSet(unittest.TestCase):
self.assertEquals(ps.pages[0].RunNavigateSteps, {'action': 'navigate1'})
self.assertEquals(ps.pages[1].RunNavigateSteps, {'action': 'navigate2'})
-
def testSuccesfulPythonPageSetLoading(self):
test_pps_dir = os.path.join(util.GetUnittestDataDir(), 'test_page_set.py')
pps = page_set.PageSet.FromFile(test_pps_dir)
@@ -106,3 +115,15 @@ class TestPageSet(unittest.TestCase):
self.assertEqual(
os.path.normpath(os.path.join(
util.GetUnittestDataDir(), 'pages/foo.html')), external_page.file_path)
+
+ def testDictBasedPageSet(self):
+ dict_ps = page_set.PageSet.FromDict({
+ 'description': 'hello',
+ 'archive_path': 'foo.wpr',
+ 'pages': [{'url': 'file://../../otherdir/foo/'}]
+ }, os.path.dirname(__file__))
+ self.assertTrue(dict_ps.IsDictBasedPageSet())
+
+ test_pps_dir = os.path.join(util.GetUnittestDataDir(), 'test_page_set.py')
+ python_ps = page_set.PageSet.FromFile(test_pps_dir)
+ self.assertFalse(python_ps.IsDictBasedPageSet())
diff --git a/tools/telemetry/telemetry/page/page_test_results_unittest.py b/tools/telemetry/telemetry/page/page_test_results_unittest.py
index 25886067b1..01aed88c0f 100644
--- a/tools/telemetry/telemetry/page/page_test_results_unittest.py
+++ b/tools/telemetry/telemetry/page/page_test_results_unittest.py
@@ -18,15 +18,10 @@ class NonPrintingPageTestResults(
class PageTestResultsTest(unittest.TestCase):
def setUp(self):
- self.page_set = page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"},
- {"url": "http://www.foo.com/"}
- ]
- }, os.path.dirname(__file__))
+ self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
@property
def pages(self):
diff --git a/tools/telemetry/telemetry/page/page_unittest.py b/tools/telemetry/telemetry/page/page_unittest.py
index 92be799971..61fabc07bd 100644
--- a/tools/telemetry/telemetry/page/page_unittest.py
+++ b/tools/telemetry/telemetry/page/page_unittest.py
@@ -42,113 +42,76 @@ class TestPage(unittest.TestCase):
(os.altsep and apage.file_path_url.endswith(os.altsep)))
def testSort(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [
- {'url': 'http://www.foo.com/'},
- {'url': 'http://www.bar.com/'}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://www.foo.com/')
+ ps.AddPageWithDefaultRunNavigate('http://www.bar.com/')
+
pages = [ps.pages[0], ps.pages[1]]
pages.sort()
self.assertEquals([ps.pages[1], ps.pages[0]],
pages)
def testGetUrlBaseDirAndFileForUrlBaseDir(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'serving_dirs': ['../somedir/'],
- 'pages': [
- {'url': 'file://../otherdir/file.html'}
- ]}, 'basedir/')
+ ps = page_set.PageSet(file_path='basedir/', serving_dirs=['../somedir/'])
+ ps.AddPageWithDefaultRunNavigate('file://../otherdir/file.html')
self.assertPathEqual(ps[0].file_path, 'otherdir/file.html')
def testDisplayUrlForHttp(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [
- {'url': 'http://www.foo.com/'},
- {'url': 'http://www.bar.com/'}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://www.foo.com/')
+ ps.AddPageWithDefaultRunNavigate('http://www.bar.com/')
+
self.assertEquals(ps[0].display_name, 'http://www.foo.com/')
self.assertEquals(ps[1].display_name, 'http://www.bar.com/')
def testDisplayUrlForHttps(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [
- {'url': 'http://www.foo.com/'},
- {'url': 'https://www.bar.com/'}
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('http://www.foo.com/')
+ ps.AddPageWithDefaultRunNavigate('https://www.bar.com/')
+
self.assertEquals(ps[0].display_name, 'http://www.foo.com/')
self.assertEquals(ps[1].display_name, 'https://www.bar.com/')
def testDisplayUrlForFile(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [
- {'url': 'file://../../otherdir/foo.html'},
- {'url': 'file://../../otherdir/bar.html'},
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/foo.html')
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/bar.html')
+
self.assertEquals(ps[0].display_name, 'foo.html')
self.assertEquals(ps[1].display_name, 'bar.html')
def testDisplayUrlForFilesDifferingBySuffix(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [
- {'url': 'file://../../otherdir/foo.html'},
- {'url': 'file://../../otherdir/foo1.html'},
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/foo.html')
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/foo1.html')
+
self.assertEquals(ps[0].display_name, 'foo.html')
self.assertEquals(ps[1].display_name, 'foo1.html')
def testDisplayUrlForFileOfDifferentPaths(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [
- {'url': 'file://../../somedir/foo.html'},
- {'url': 'file://../../otherdir/bar.html'},
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('file://../../somedir/foo.html')
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/bar.html')
+
self.assertEquals(ps[0].display_name, 'somedir/foo.html')
self.assertEquals(ps[1].display_name, 'otherdir/bar.html')
def testDisplayUrlForFileDirectories(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [
- {'url': 'file://../../otherdir/foo/'},
- {'url': 'file://../../otherdir/bar/'},
- ]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/foo')
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/bar')
+
self.assertEquals(ps[0].display_name, 'foo')
self.assertEquals(ps[1].display_name, 'bar')
def testDisplayUrlForSingleFile(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [{'url': 'file://../../otherdir/foo.html'}]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/foo.html')
+
self.assertEquals(ps[0].display_name, 'foo.html')
def testDisplayUrlForSingleDirectory(self):
- ps = page_set.PageSet.FromDict({
- 'description': 'hello',
- 'archive_path': 'foo.wpr',
- 'pages': [{'url': 'file://../../otherdir/foo/'}]
- }, os.path.dirname(__file__))
+ ps = page_set.PageSet(file_path=os.path.dirname(__file__))
+ ps.AddPageWithDefaultRunNavigate('file://../../otherdir/foo')
+
self.assertEquals(ps[0].display_name, 'foo')
diff --git a/tools/telemetry/telemetry/unittest/simple_mock.py b/tools/telemetry/telemetry/unittest/simple_mock.py
index e468460f23..b54dcac723 100644
--- a/tools/telemetry/telemetry/unittest/simple_mock.py
+++ b/tools/telemetry/telemetry/unittest/simple_mock.py
@@ -93,3 +93,14 @@ class MockObject(object):
return expected_call.return_value
handler.is_hook = True
setattr(self, func_name, handler)
+
+
+class MockTimer(object):
+ def __init__(self):
+ self._elapsed_time = 0
+
+ def Sleep(self, time):
+ self._elapsed_time += time
+
+ def GetTime(self):
+ return self._elapsed_time
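
MockTimer above is what the reworked wait tests earlier in this diff swap in for time.sleep, so duration-based assertions run instantly and deterministically. A self-contained sketch of the patch-and-restore pattern, where busy_wait is a made-up stand-in for the code under test:

    import time

    class MockTimer(object):
        def __init__(self):
            self._elapsed_time = 0

        def Sleep(self, seconds):
            self._elapsed_time += seconds   # record instead of blocking

        def GetTime(self):
            return self._elapsed_time

    def busy_wait(seconds):
        time.sleep(seconds)                 # hypothetical code under test

    mock_timer = MockTimer()
    real_sleep = time.sleep
    time.sleep = mock_timer.Sleep           # patch, as the wait tests do
    try:
        busy_wait(1)
        assert mock_timer.GetTime() == 1
    finally:
        time.sleep = real_sleep             # always restore the real sleep
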
diff --git a/tools/telemetry/telemetry/unittest/system_stub.py b/tools/telemetry/telemetry/unittest/system_stub.py
index 36798f96b5..c314bb30f8 100644
--- a/tools/telemetry/telemetry/unittest/system_stub.py
+++ b/tools/telemetry/telemetry/unittest/system_stub.py
@@ -51,8 +51,6 @@ class Override(object):
class AdbCommandsModuleStub(object):
-# adb not even found
-# android_browser_finder not returning
class AdbCommandsStub(object):
def __init__(self, module, device):
self._module = module
@@ -74,6 +72,9 @@ class AdbCommandsModuleStub(object):
def IsUserBuild(self):
return False
+ def WaitForDevicePm(self):
+ pass
+
def __init__(self):
self.attached_devices = []
self.shell_command_handlers = {}
@@ -146,6 +147,10 @@ class OpenFunctionStub(object):
class OsModuleStub(object):
+ class OsEnvironModuleStub(object):
+ def get(self, _):
+ return None
+
class OsPathModuleStub(object):
def __init__(self, sys_module):
self.sys = sys_module
@@ -194,6 +199,7 @@ class OsModuleStub(object):
def __init__(self, sys_module=sys):
self.path = OsModuleStub.OsPathModuleStub(sys_module)
+ self.environ = OsModuleStub.OsEnvironModuleStub()
self.display = ':0'
self.local_app_data = None
self.program_files = None
diff --git a/tools/telemetry/telemetry/unittest/tab_test_case.py b/tools/telemetry/telemetry/unittest/tab_test_case.py
index 6cc742afd1..5d31cd62e6 100644
--- a/tools/telemetry/telemetry/unittest/tab_test_case.py
+++ b/tools/telemetry/telemetry/unittest/tab_test_case.py
@@ -14,6 +14,7 @@ class TabTestCase(unittest.TestCase):
def __init__(self, *args):
self._extra_browser_args = []
self.test_file_path = None
+ self.test_url = None
super(TabTestCase, self).__init__(*args)
def setUp(self):
@@ -55,6 +56,6 @@ class TabTestCase(unittest.TestCase):
"""
self._browser.SetHTTPServerDirectories(util.GetUnittestDataDir())
self.test_file_path = os.path.join(util.GetUnittestDataDir(), filename)
- self._tab.Navigate(self._browser.http_server.UrlOf(self.test_file_path),
- script_to_evaluate_on_commit)
+ self.test_url = self._browser.http_server.UrlOf(self.test_file_path)
+ self._tab.Navigate(self.test_url, script_to_evaluate_on_commit)
self._tab.WaitForDocumentReadyStateToBeComplete()
diff --git a/tools/telemetry/telemetry/value/histogram_unittest.py b/tools/telemetry/telemetry/value/histogram_unittest.py
index faf50abf44..ba75b5e4fa 100644
--- a/tools/telemetry/telemetry/value/histogram_unittest.py
+++ b/tools/telemetry/telemetry/value/histogram_unittest.py
@@ -10,15 +10,10 @@ from telemetry.value import histogram as histogram_module
class TestBase(unittest.TestCase):
def setUp(self):
- self.page_set = page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"},
- {"url": "http://www.foo.com/"}
- ]
- }, os.path.dirname(__file__))
+ self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
@property
def pages(self):
diff --git a/tools/telemetry/telemetry/value/list_of_scalar_values_unittest.py b/tools/telemetry/telemetry/value/list_of_scalar_values_unittest.py
index c54ca26306..88f42e6411 100644
--- a/tools/telemetry/telemetry/value/list_of_scalar_values_unittest.py
+++ b/tools/telemetry/telemetry/value/list_of_scalar_values_unittest.py
@@ -10,15 +10,10 @@ from telemetry.value import list_of_scalar_values
class TestBase(unittest.TestCase):
def setUp(self):
- self.page_set = page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"},
- {"url": "http://www.foo.com/"}
- ]
- }, os.path.dirname(__file__))
+ self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
@property
def pages(self):
diff --git a/tools/telemetry/telemetry/value/list_of_string_values.py b/tools/telemetry/telemetry/value/list_of_string_values.py
new file mode 100644
index 0000000000..02900e3640
--- /dev/null
+++ b/tools/telemetry/telemetry/value/list_of_string_values.py
@@ -0,0 +1,91 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry import value as value_module
+
+class ListOfStringValues(value_module.Value):
+ def __init__(self, page, name, units, values,
+ important=True, same_page_merge_policy=value_module.CONCATENATE):
+ super(ListOfStringValues, self).__init__(page, name, units, important)
+ assert len(values) > 0
+ assert isinstance(values, list)
+ for v in values:
+ assert isinstance(v, basestring)
+ self.values = values
+ self.same_page_merge_policy = same_page_merge_policy
+
+ def __repr__(self):
+ if self.page:
+ page_name = self.page.url
+ else:
+ page_name = None
+ if self.same_page_merge_policy == value_module.CONCATENATE:
+ merge_policy = 'CONCATENATE'
+ else:
+ merge_policy = 'PICK_FIRST'
+ return ('ListOfStringValues(%s, %s, %s, %s, ' +
+ 'important=%s, same_page_merge_policy=%s)') % (
+ page_name,
+ self.name, self.units,
+ repr(self.values),
+ self.important,
+ merge_policy)
+
+ def GetBuildbotDataType(self, output_context):
+ if self._IsImportantGivenOutputIntent(output_context):
+ return 'default'
+ return 'unimportant'
+
+ def GetBuildbotValue(self):
+ return self.values
+
+ def GetRepresentativeNumber(self):
+ return None
+
+ def GetRepresentativeString(self):
+ return repr(self.values)
+
+ def IsMergableWith(self, that):
+ return (super(ListOfStringValues, self).IsMergableWith(that) and
+ self.same_page_merge_policy == that.same_page_merge_policy)
+
+ @classmethod
+ def MergeLikeValuesFromSamePage(cls, values):
+ assert len(values) > 0
+ v0 = values[0]
+
+ if v0.same_page_merge_policy == value_module.PICK_FIRST:
+ return ListOfStringValues(
+ v0.page, v0.name, v0.units,
+ values[0].values,
+ important=v0.important,
+ same_page_merge_policy=v0.same_page_merge_policy)
+
+ assert v0.same_page_merge_policy == value_module.CONCATENATE
+ all_values = []
+ for v in values:
+ all_values.extend(v.values)
+ return ListOfStringValues(
+ v0.page, v0.name, v0.units,
+ all_values,
+ important=v0.important,
+ same_page_merge_policy=v0.same_page_merge_policy)
+
+ @classmethod
+ def MergeLikeValuesFromDifferentPages(cls, values,
+ group_by_name_suffix=False):
+ assert len(values) > 0
+ v0 = values[0]
+ all_values = []
+ for v in values:
+ all_values.extend(v.values)
+ if not group_by_name_suffix:
+ name = v0.name
+ else:
+ name = v0.name_suffix
+ return ListOfStringValues(
+ None, name, v0.units,
+ all_values,
+ important=v0.important,
+ same_page_merge_policy=v0.same_page_merge_policy)
diff --git a/tools/telemetry/telemetry/value/list_of_string_values_unittest.py b/tools/telemetry/telemetry/value/list_of_string_values_unittest.py
new file mode 100644
index 0000000000..a2dacc1059
--- /dev/null
+++ b/tools/telemetry/telemetry/value/list_of_string_values_unittest.py
@@ -0,0 +1,78 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import os
+import unittest
+
+from telemetry import value
+from telemetry.page import page_set
+from telemetry.value import list_of_string_values
+
+class TestBase(unittest.TestCase):
+ def setUp(self):
+ self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
+
+ @property
+ def pages(self):
+ return self.page_set.pages
+
+class ListOfStringValuesTest(TestBase):
+ def testListSamePageMergingWithSamePageConcatenatePolicy(self):
+ page0 = self.pages[0]
+ v0 = list_of_string_values.ListOfStringValues(
+ page0, 'x', 'label',
+ ['L1','L2'], same_page_merge_policy=value.CONCATENATE)
+ v1 = list_of_string_values.ListOfStringValues(
+ page0, 'x', 'label',
+ ['L3','L4'], same_page_merge_policy=value.CONCATENATE)
+ self.assertTrue(v1.IsMergableWith(v0))
+
+ vM = (list_of_string_values.ListOfStringValues.
+ MergeLikeValuesFromSamePage([v0, v1]))
+ self.assertEquals(page0, vM.page)
+ self.assertEquals('x', vM.name)
+ self.assertEquals('label', vM.units)
+ self.assertEquals(value.CONCATENATE, vM.same_page_merge_policy)
+ self.assertEquals(True, vM.important)
+ self.assertEquals(['L1', 'L2', 'L3', 'L4'], vM.values)
+
+ def testListSamePageMergingWithPickFirstPolicy(self):
+ page0 = self.pages[0]
+ v0 = list_of_string_values.ListOfStringValues(
+ page0, 'x', 'label',
+ ['L1','L2'], same_page_merge_policy=value.PICK_FIRST)
+ v1 = list_of_string_values.ListOfStringValues(
+ page0, 'x', 'label',
+ ['L3','L4'], same_page_merge_policy=value.PICK_FIRST)
+ self.assertTrue(v1.IsMergableWith(v0))
+
+ vM = (list_of_string_values.ListOfStringValues.
+ MergeLikeValuesFromSamePage([v0, v1]))
+ self.assertEquals(page0, vM.page)
+ self.assertEquals('x', vM.name)
+ self.assertEquals('label', vM.units)
+ self.assertEquals(value.PICK_FIRST, vM.same_page_merge_policy)
+ self.assertEquals(True, vM.important)
+ self.assertEquals(['L1', 'L2'], vM.values)
+
+ def testListDifferentPageMerging(self):
+ page0 = self.pages[0]
+ v0 = list_of_string_values.ListOfStringValues(
+ page0, 'x', 'label',
+ ['L1', 'L2'], same_page_merge_policy=value.PICK_FIRST)
+ v1 = list_of_string_values.ListOfStringValues(
+ page0, 'x', 'label',
+ ['L3', 'L4'], same_page_merge_policy=value.PICK_FIRST)
+ self.assertTrue(v1.IsMergableWith(v0))
+
+ vM = (list_of_string_values.ListOfStringValues.
+ MergeLikeValuesFromDifferentPages([v0, v1]))
+ self.assertEquals(None, vM.page)
+ self.assertEquals('x', vM.name)
+ self.assertEquals('label', vM.units)
+ self.assertEquals(value.PICK_FIRST, vM.same_page_merge_policy)
+ self.assertEquals(True, vM.important)
+ self.assertEquals(['L1', 'L2', 'L3', 'L4'], vM.values)
diff --git a/tools/telemetry/telemetry/value/merge_values_unittest.py b/tools/telemetry/telemetry/value/merge_values_unittest.py
index 781595c40f..72d3b318c2 100644
--- a/tools/telemetry/telemetry/value/merge_values_unittest.py
+++ b/tools/telemetry/telemetry/value/merge_values_unittest.py
@@ -11,15 +11,10 @@ from telemetry.value import scalar
class TestBase(unittest.TestCase):
def setUp(self):
- self.page_set = page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"},
- {"url": "http://www.foo.com/"}
- ]
- }, os.path.dirname(__file__))
+ self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
@property
def pages(self):
diff --git a/tools/telemetry/telemetry/value/scalar_unittest.py b/tools/telemetry/telemetry/value/scalar_unittest.py
index 3666c8baa1..31412521a7 100644
--- a/tools/telemetry/telemetry/value/scalar_unittest.py
+++ b/tools/telemetry/telemetry/value/scalar_unittest.py
@@ -10,15 +10,10 @@ from telemetry.value import scalar
class TestBase(unittest.TestCase):
def setUp(self):
- self.page_set = page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"},
- {"url": "http://www.foo.com/"}
- ]
- }, os.path.dirname(__file__))
+ self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
@property
def pages(self):
diff --git a/tools/telemetry/telemetry/value/string.py b/tools/telemetry/telemetry/value/string.py
new file mode 100644
index 0000000000..f413f8b8e9
--- /dev/null
+++ b/tools/telemetry/telemetry/value/string.py
@@ -0,0 +1,68 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry import value as value_module
+from telemetry.value import list_of_string_values
+
+class StringValue(value_module.Value):
+ def __init__(self, page, name, units, value, important=True):
+ """A single value (float, integer or string) result from a test.
+
+ A test that output a hash of the content in a page might produce a
+ string value:
+ StringValue(page, 'page_hash', 'hash', '74E377FF')
+ """
+ super(StringValue, self).__init__(page, name, units, important)
+ assert isinstance(value, basestring)
+ self.value = value
+
+ def __repr__(self):
+ if self.page:
+ page_name = self.page.url
+ else:
+ page_name = None
+ return 'StringValue(%s, %s, %s, %s, important=%s)' % (
+ page_name,
+ self.name, self.units,
+ self.value,
+ self.important)
+
+ def GetBuildbotDataType(self, output_context):
+ if self._IsImportantGivenOutputIntent(output_context):
+ return 'default'
+ return 'unimportant'
+
+ def GetBuildbotValue(self):
+ # Buildbot's print_perf_results method likes to get lists for all values,
+ # even when they are scalar, so list-ize the return value.
+ return [self.value]
+
+ def GetRepresentativeNumber(self):
+ return self.value
+
+ def GetRepresentativeString(self):
+ return str(self.value)
+
+ @classmethod
+ def MergeLikeValuesFromSamePage(cls, values):
+ assert len(values) > 0
+ v0 = values[0]
+ return list_of_string_values.ListOfStringValues(
+ v0.page, v0.name, v0.units,
+ [v.value for v in values],
+ important=v0.important)
+
+ @classmethod
+ def MergeLikeValuesFromDifferentPages(cls, values,
+ group_by_name_suffix=False):
+ assert len(values) > 0
+ v0 = values[0]
+ if not group_by_name_suffix:
+ name = v0.name
+ else:
+ name = v0.name_suffix
+ return list_of_string_values.ListOfStringValues(
+ None, name, v0.units,
+ [v.value for v in values],
+ important=v0.important)
diff --git a/tools/telemetry/telemetry/value/string_unittest.py b/tools/telemetry/telemetry/value/string_unittest.py
new file mode 100644
index 0000000000..e50a4dfbc9
--- /dev/null
+++ b/tools/telemetry/telemetry/value/string_unittest.py
@@ -0,0 +1,61 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import os
+import unittest
+
+from telemetry import value
+from telemetry.page import page_set
+from telemetry.value import string
+
+class TestBase(unittest.TestCase):
+ def setUp(self):
+ self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
+
+ @property
+ def pages(self):
+ return self.page_set.pages
+
+class StringValueTest(TestBase):
+ def testBuildbotValueType(self):
+ page0 = self.pages[0]
+ v = string.StringValue(page0, 'x', 'label', 'L1', important=True)
+ self.assertEquals('default', v.GetBuildbotDataType(
+ value.COMPUTED_PER_PAGE_SUMMARY_OUTPUT_CONTEXT))
+ self.assertEquals(['L1'], v.GetBuildbotValue())
+ self.assertEquals(('x_by_url', page0.display_name),
+ v.GetBuildbotMeasurementAndTraceNameForPerPageResult())
+
+ v = string.StringValue(page0, 'x', 'label', 'L1', important=False)
+ self.assertEquals(
+ 'unimportant',
+ v.GetBuildbotDataType(value.COMPUTED_PER_PAGE_SUMMARY_OUTPUT_CONTEXT))
+
+ def testStringSamePageMerging(self):
+ page0 = self.pages[0]
+ v0 = string.StringValue(page0, 'x', 'label', 'L1')
+ v1 = string.StringValue(page0, 'x', 'label', 'L2')
+ self.assertTrue(v1.IsMergableWith(v0))
+
+ vM = string.StringValue.MergeLikeValuesFromSamePage([v0, v1])
+ self.assertEquals(page0, vM.page)
+ self.assertEquals('x', vM.name)
+ self.assertEquals('label', vM.units)
+ self.assertEquals(True, vM.important)
+ self.assertEquals(['L1', 'L2'], vM.values)
+
+ def testStringDifferentSiteMerging(self):
+ page0 = self.pages[0]
+ page1 = self.pages[1]
+ v0 = string.StringValue(page0, 'x', 'label', 'L1')
+ v1 = string.StringValue(page1, 'x', 'label', 'L2')
+
+ vM = string.StringValue.MergeLikeValuesFromDifferentPages([v0, v1])
+ self.assertEquals(None, vM.page)
+ self.assertEquals('x', vM.name)
+ self.assertEquals('label', vM.units)
+ self.assertEquals(True, vM.important)
+ self.assertEquals(['L1', 'L2'], vM.values)
diff --git a/tools/telemetry/telemetry/value/value_unittest_.py b/tools/telemetry/telemetry/value/value_unittest_.py
index 77e2569ab9..20468b4620 100644
--- a/tools/telemetry/telemetry/value/value_unittest_.py
+++ b/tools/telemetry/telemetry/value/value_unittest_.py
@@ -9,15 +9,10 @@ from telemetry.page import page_set
class TestBase(unittest.TestCase):
def setUp(self):
- self.page_set = page_set.PageSet.FromDict({
- "description": "hello",
- "archive_path": "foo.wpr",
- "pages": [
- {"url": "http://www.bar.com/"},
- {"url": "http://www.baz.com/"},
- {"url": "http://www.foo.com/"}
- ]
- }, os.path.dirname(__file__))
+ self.page_set = page_set.PageSet(file_path=os.path.dirname(__file__))
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.bar.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.baz.com/")
+ self.page_set.AddPageWithDefaultRunNavigate("http://www.foo.com/")
@property
def pages(self):
diff --git a/tools/telemetry/unittest_data/data/archive_blank.json b/tools/telemetry/unittest_data/data/archive_blank.json
new file mode 100644
index 0000000000..d6568ada4e
--- /dev/null
+++ b/tools/telemetry/unittest_data/data/archive_blank.json
@@ -0,0 +1,8 @@
+{
+ "description": "Describes the Web Page Replay archives for a page set. Don't edit by hand! Use record_wpr for updating.",
+ "archives": {
+ "archive_blank_000.wpr": [
+ "blank.html"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/tools/telemetry/unittest_data/data/archive_blank_000.wpr.sha1 b/tools/telemetry/unittest_data/data/archive_blank_000.wpr.sha1
new file mode 100644
index 0000000000..badfd21770
--- /dev/null
+++ b/tools/telemetry/unittest_data/data/archive_blank_000.wpr.sha1
@@ -0,0 +1 @@
+03a05fcccf9a3d354226d95b95363c74a06ec72a
\ No newline at end of file
diff --git a/tools/valgrind/browser_wrapper_win.py b/tools/valgrind/browser_wrapper_win.py
index b855e80d6d..ee0a961286 100644
--- a/tools/valgrind/browser_wrapper_win.py
+++ b/tools/valgrind/browser_wrapper_win.py
@@ -14,7 +14,7 @@ import subprocess
testcase_name = None
for arg in sys.argv:
- m = re.match("\-\-test\-name=(.*)", arg)
+ m = re.match("\-\-gtest_filter=(.*)", arg)
if m:
assert testcase_name is None
testcase_name = m.groups()[0]
diff --git a/tools/valgrind/chrome_tests.py b/tools/valgrind/chrome_tests.py
index 79d4fce097..0947701fc4 100755
--- a/tools/valgrind/chrome_tests.py
+++ b/tools/valgrind/chrome_tests.py
@@ -363,6 +363,18 @@ class ChromeTests:
def TestViews(self):
return self.SimpleTest("views", "views_unittests")
+ def TestCloudPrint(self):
+ return self.SimpleTest("cloud_print", "cloud_print_unittests")
+
+ def TestCacheInvalidation(self):
+ return self.SimpleTest("cacheinvalidation", "cacheinvalidation_unittests")
+
+ def TestAddressInput(self):
+ return self.SimpleTest("addressinput", "libaddressinput_unittests")
+
+ def TestPhoneNumber(self):
+ return self.SimpleTest("phonenumber", "libphonenumber_unittests")
+
# Valgrind timeouts are in seconds.
UI_VALGRIND_ARGS = ["--timeout=14400", "--trace_children", "--indirect"]
# UI test timeouts are in milliseconds.
@@ -585,6 +597,14 @@ class ChromeTests:
"unit": TestUnit, "unit_tests": TestUnit,
"url": TestURL, "url_unittests": TestURL,
"views": TestViews, "views_unittests": TestViews,
+ "cloud_print": TestCloudPrint,
+ "cloud_print_unittests": TestCloudPrint,
+ "cacheinvalidation": TestCacheInvalidation,
+ "cacheinvalidation_unittests": TestCacheInvalidation,
+ "addressinput": TestAddressInput,
+ "libaddressinput_unittests": TestAddressInput,
+ "phonenumber": TestPhoneNumber,
+ "libphonenumber_unittests": TestPhoneNumber,
}
diff --git a/tools/valgrind/drmemory/suppressions.txt b/tools/valgrind/drmemory/suppressions.txt
index 3096679949..689fa1bce6 100644
--- a/tools/valgrind/drmemory/suppressions.txt
+++ b/tools/valgrind/drmemory/suppressions.txt
@@ -437,7 +437,6 @@ skia.dll!SkGlyphCache::findImage
skia.dll!D1G_NoBounder_RectClip
skia.dll!SkDraw::drawPosText
skia.dll!SkBitmapDevice::drawPosText
-skia.dll!SkCanvas::drawPosText
HANDLE LEAK
name=http://crbug.com/346993
@@ -447,11 +446,12 @@ KERNEL32.dll!DuplicateHandle*
base.dll!base::`anonymous namespace'::ThreadFunc
KERNEL32.dll!BaseThreadInitThunk
-HANDLE LEAK
-name=http://crbug.com/366246
-system call NtDuplicateObject
-KERNELBASE.dll!DuplicateHandle
-KERNEL32.dll!DuplicateHandle
-base.dll!base::SharedMemory::ShareToProcessCommon
-content.dll!content::ResourceBuffer::ShareToProcess
-content.dll!content::AsyncResourceHandler::OnReadCompleted
+UNADDRESSABLE ACCESS
+name=http://crbug.com/42043-uninit
+...
+QuickTime.qts!*
+
+GDI USAGE ERROR
+name=http://crbug.com/42043-gdi
+...
+QuickTime.qts!*
diff --git a/tools/valgrind/drmemory/suppressions_full.txt b/tools/valgrind/drmemory/suppressions_full.txt
index 99822b65ce..f2ba6a7d9d 100644
--- a/tools/valgrind/drmemory/suppressions_full.txt
+++ b/tools/valgrind/drmemory/suppressions_full.txt
@@ -586,6 +586,7 @@ name=http://code.google.com/p/drmemory/issues/detail?id=512 b
...
*!Encrypt*
+# TODO(bruening): remove these once we have v8 bitfields handled
UNINITIALIZED READ
name=http://code.google.com/p/drmemory/issues/detail?id=513 a
*!v8*
@@ -601,6 +602,19 @@ name=http://code.google.com/p/drmemory/issues/detail?id=513 c
...
*!v8*
+# We have seen some cases (not yet understood: crbug.com/364146) where v8.dll
+# has no symbols. These are all on the bots using component build, so we use
+# v8.dll. TODO(bruening): remove these once we've fixed the symbol issue.
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=513 d
+v8.dll!*
+
+UNINITIALIZED READ
+name=http://code.google.com/p/drmemory/issues/detail?id=513 e
+<not in a module>
+...
+v8.dll!*
+
UNINITIALIZED READ
name=http://code.google.com/p/drmemory/issues/detail?id=546
...
@@ -1952,12 +1966,12 @@ osmesa.dll!LINTERP
osmesa.dll!INTERP_4F
osmesa.dll!_tnl_generic_interp
-UNADDRESSABLE ACCESS
+UNINITIALIZED READ
name=bug_340752
...
-blink_heap.dll!WebCore::ThreadState::visitStack
-blink_heap.dll!WebCore::ThreadState::trace
-blink_heap.dll!WebCore::ThreadState::visitRoots
+*!WebCore::ThreadState::visitStack
+*!WebCore::ThreadState::trace
+*!WebCore::ThreadState::visitRoots
UNINITIALIZED READ
name=bug_343663
@@ -2001,8 +2015,16 @@ content.dll!content::BrowserAccessibilityManagerWin::*
...
*!*::UpdateNode
+# There are so many osmesa errors we have to suppress (mostly the unpack_RGB*
+# variety) that it's a performance hit. We avoid that by requesting
+# whole-module suppression
+# (see https://code.google.com/p/drmemory/issues/detail?id=1529).
+UNINITIALIZED READ
+name=bug_347967_all_osmesa
+osmesa.dll!*
+
UNINITIALIZED READ
-name=bug_347976
+name=bug_347967
osmesa.dll!unpack_RGB*888
osmesa.dll!_mesa_unpack_rgba_row
osmesa.dll!slow_read_rgba_pixels
@@ -2014,7 +2036,7 @@ gpu.dll!gpu::CommandParser::ProcessCommand
gpu.dll!gpu::GpuScheduler::PutChanged
UNINITIALIZED READ
-name=bug_347976,bug_348357
+name=bug_347967,bug_348357
osmesa.dll!clip_span
osmesa.dll!_swrast_write_rgba_span
osmesa.dll!general_triangle
@@ -2065,3 +2087,17 @@ name=bug_364146
...
v8.dll!*
net_with_v8.dll!net::ProxyResolverV8::Context::*
+
+UNINITIALIZED READ
+name=bug_334448
+*!CLD2::UTF8GenericReplaceInternal
+*!CLD2::UTF8GenericReplace
+*!CLD2::ScriptScanner::LowerScriptSpan
+*!CLD2::ScriptScanner::GetOneScriptSpanLower
+*!CLD2::DetectLanguageSummaryV2
+*!CLD2::DetectLanguageSummary
+
+UNINITIALIZED READ
+name=bug_42043
+...
+QuickTime.qts!*
diff --git a/tools/valgrind/drmemory_analyze.py b/tools/valgrind/drmemory_analyze.py
index 7a82b461f3..915c601c25 100755
--- a/tools/valgrind/drmemory_analyze.py
+++ b/tools/valgrind/drmemory_analyze.py
@@ -122,9 +122,13 @@ class DrMemoryAnalyzer:
self.ReadLine()
while self.line_.strip() != "":
line = self.line_.strip()
- (count, name) = re.match(" *([0-9]+)x(?: \(leaked .*\))?: (.*)",
+ (count, name) = re.match(" *([0-9\?]+)x(?: \(.*?\))?: (.*)",
line).groups()
- count = int(count)
+ if (count == "?"):
+ # Whole-module suppressions have no count available: assume 1
+ count = 1
+ else:
+ count = int(count)
self.used_suppressions[name] += count
self.ReadLine()
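
The widened pattern above now accepts a '?' count (what Dr. Memory reports for whole-module suppressions) and an arbitrary parenthesised qualifier after the count. A quick, standalone check of the pattern against both line shapes (the sample lines are invented):

    import re

    PATTERN = r" *([0-9\?]+)x(?: \(.*?\))?: (.*)"

    for line in ["      12x (leaked 96 bytes): bug_123456",
                 "       ?x: bug_347967_all_osmesa"]:
        count, name = re.match(PATTERN, line).groups()
        count = 1 if count == "?" else int(count)  # '?' means count unknown
        print("%d %s" % (count, name))
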
diff --git a/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt b/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt
index 8e06c50184..81abcb74af 100644
--- a/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt
+++ b/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt
@@ -2,3 +2,17 @@
RenderViewImplTest.SendProgressCompletionUpdates
WebRtcBrowserTests/WebRtcBrowserTest.Ca*
WebRtcBrowserTests/WebRtcBrowserTest.NegotiateOfferWithBLine*
+# Dr.Memory-i#1528-c#2
+BrowserPluginHostTest.InputMethod
+DeviceInertialSensorBrowserTest.OrientationNullTestWithAlert
+DeviceInertialSensorBrowserTest.MotionNullTestWithAlert
+PluginTest.GetURLRequestFailWrite
+PluginTest.MultipleInstancesSyncCalls
+PluginTest.EnsureScriptingWorksInDestroy
+TouchActionBrowserTest.TouchActionNone
+RenderViewImplTest.DontIgnoreBackAfterNavEntryLimit
+RenderViewImplTest.SendSwapOutACK
+ResourceFetcherTests.ResourceFetcherDidFail
+OpenedByDOMTest.CrossProcessPopup
+# Dr.Memory-i#1528-c#4
+RenderViewImplTest.StaleNavigationsIgnored
diff --git a/tools/valgrind/gtest_exclude/content_browsertests.gtest-tsan.txt b/tools/valgrind/gtest_exclude/content_browsertests.gtest-tsan.txt
deleted file mode 100644
index 62c84338e6..0000000000
--- a/tools/valgrind/gtest_exclude/content_browsertests.gtest-tsan.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-# Flaky on Linux tsan bots. crbug.com/349634 and crbug.com/358390
-WebRtcBrowserTest.EstablishAudioVideoCallAndMeasureOutputLevel
-WebRtcBrowserTests/WebRtcBrowserTest.EstablishAudioVideoCallAndVerifyMutingWorks/0
-WebRtcBrowserTests/WebRtcBrowserTest.EstablishAudioVideoCallAndVerifyMutingWorks/1
diff --git a/tools/valgrind/memcheck/suppressions.txt b/tools/valgrind/memcheck/suppressions.txt
index 35be718af1..107d6b6fbd 100644
--- a/tools/valgrind/memcheck/suppressions.txt
+++ b/tools/valgrind/memcheck/suppressions.txt
@@ -5876,24 +5876,6 @@
fun:_ZN2v88internalL19HandleApiCallHelperILb0EEEPNS0_11MaybeObjectENS0_12_GLOBAL__N_116BuiltinArgumentsILNS0_21BuiltinExtraArgumentsE1EEEPNS0_7IsolateE
}
{
- bug_301900
- Memcheck:Unaddressable
- fun:_Z7GrCrashPKc
- fun:_ZN19GrInOrderDrawBuffer6onDrawERKN12GrDrawTarget8DrawInfoE
- fun:_ZN12GrDrawTarget20drawIndexedInstancesE15GrPrimitiveTypeiiiPK6SkRect
- fun:_ZN19GrInOrderDrawBuffer10onDrawRectERK6SkRectPK8SkMatrixPS1_S5_
- fun:_ZN12GrDrawTarget8drawRectERK6SkRectPK8SkMatrixPS1_S5_
- ...
- fun:_ZN13SkGPipeReader8playbackEPKvmjPm
- fun:_ZN22DeferredPipeController8playbackEb
- fun:_ZN14DeferredDevice20flushPendingCommandsE12PlaybackMode
- fun:_ZN14DeferredDevice14onAccessBitmapEv
- fun:_ZN12SkBaseDevice12accessBitmapEb
- fun:_ZN12SkBaseDevice10readPixelsEP8SkBitmapiiN8SkCanvas10Config8888E
- fun:_ZN8SkCanvas10readPixelsEP8SkBitmapiiNS_10Config8888E
- fun:_ZN7WebCore15GraphicsContext10readPixelsEP8SkBitmapiiN8SkCanvas10Config8888E
-}
-{
bug_309468
Memcheck:Leak
fun:_Znw*
@@ -6363,3 +6345,38 @@
fun:_ZN7WebCore20IDBFactoryV8InternalL10openMethodERKN2v820FunctionCallbackInfoINS1_5ValueEEE
fun:_ZN7WebCore20IDBFactoryV8InternalL18openMethodCallbackERKN2v820FunctionCallbackInfoINS1_5ValueEEE
}
+{
+ bug_367809_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4mojo6common13HandleWatcher5StartERKNS_6HandleEjmRKN4base8CallbackIFviEEE
+ fun:_ZN4mojo8internal12_GLOBAL__N_19AsyncWaitEP15MojoAsyncWaiterjjmPFvPviES4_
+ fun:_ZN4mojo8internal9Connector14WaitToReadMoreEv
+ fun:_ZN4mojo8internal9ConnectorC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo8internal6RouterC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5StateC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5resetENS_16ScopedHandleBaseINS_15InterfaceHandleIS1_EEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN7content19MojoApplicationHost4InitEv
+}
+{
+ bug_367809_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4mojo8internal12_GLOBAL__N_19AsyncWaitEP15MojoAsyncWaiterjjmPFvPviES4_
+ fun:_ZN4mojo8internal9Connector14WaitToReadMoreEv
+ fun:_ZN4mojo8internal9ConnectorC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo8internal6RouterC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5StateC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5resetENS_16ScopedHandleBaseINS_15InterfaceHandleIS1_EEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN7content19MojoApplicationHost4InitEv
+}
+{
+ bug_367809_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4mojo8internal10SharedDataIPNS0_6RouterEEC1ERKS3_
+ fun:_ZN4mojo8internal6RouterC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5StateC1ENS_16ScopedHandleBaseINS_17MessagePipeHandleEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN4mojo9RemotePtrINS_11ShellClientEE5resetENS_16ScopedHandleBaseINS_15InterfaceHandleIS1_EEEEPNS_5ShellEPNS_12ErrorHandlerEP15MojoAsyncWaiter
+ fun:_ZN7content19MojoApplicationHost4InitEv
+}
diff --git a/tools/valgrind/tsan_v2/ignores.txt b/tools/valgrind/tsan_v2/ignores.txt
index 10acb4ce74..45d08bac60 100644
--- a/tools/valgrind/tsan_v2/ignores.txt
+++ b/tools/valgrind/tsan_v2/ignores.txt
@@ -10,7 +10,3 @@ fun:*ThreadData*Initialize*
# See http://crbug.com/172104
fun:*v8*internal*ThreadEntry*
-
-# Avoid deadlocks between race reports on _M_rep() called from
-# base::FilePath::StripTrailingSeparatorsInternal(), see http://crbug.com/356676
-fun:*StripTrailingSeparatorsInternal*
diff --git a/tools/valgrind/tsan_v2/suppressions.txt b/tools/valgrind/tsan_v2/suppressions.txt
index 03ae3a7b61..1f8b1851fb 100644
--- a/tools/valgrind/tsan_v2/suppressions.txt
+++ b/tools/valgrind/tsan_v2/suppressions.txt
@@ -47,10 +47,6 @@ race:media::ReleaseData
# http://crbug.com/158922
race:third_party/libvpx/source/libvpx/vp8/encoder/*
-# See http://crbug.com/181502
-race:_M_rep
-race:_M_is_leaked
-
# http://crbug.com/189177
race:thread_manager
race:v8::Locker::Initialize
@@ -198,9 +194,6 @@ race:CommandLine::GetSwitchValueASCII
race:blink::s_platform
race:content::RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl
-# http://crbug.com/342662
-race:SetWindowList
-
# http://crbug.com/345240
race:WTF::s_shutdown
@@ -233,8 +226,8 @@ race:cricket::WebRtcVideoMediaChannel::SetSendCodec
# http://crbug.com/347553
race:blink::WebString::reset
-# https://code.google.com/p/v8/issues/detail?id=3143
-race:v8::internal::FLAG_track_double_fields
+# http://crbug.com/348511
+race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms
# http://crbug.com/348982
race:cricket::P2PTransportChannel::OnConnectionDestroyed
@@ -257,3 +250,6 @@ race:gfx::ImageFamily::~ImageFamily
# http://crbug.com/364014
race:WTF::Latin1Encoding()::globalLatin1Encoding
+
+# https://code.google.com/p/v8/issues/detail?id=3143
+race:v8::internal::FLAG_track_double_fields