summaryrefslogtreecommitdiff
path: root/build
diff options
context:
space:
mode:
authorTorne (Richard Coles) <torne@google.com>2014-08-28 12:05:23 +0100
committerTorne (Richard Coles) <torne@google.com>2014-08-28 12:05:23 +0100
commit03b57e008b61dfcb1fbad3aea950ae0e001748b0 (patch)
tree9a740c1a5fbe659ec83484b67cbc679332f5a408 /build
parentca7d0c81aa30d24514c34c963f43cd24da34a2bf (diff)
downloadchromium_org-03b57e008b61dfcb1fbad3aea950ae0e001748b0.tar.gz
Merge from Chromium at DEPS revision 291560
This commit was generated by merge_to_master.py. Change-Id: Ic58269055810d51286b4109e59b90b6856887a30
Diffstat (limited to 'build')
-rw-r--r--build/all.gyp11
-rw-r--r--build/android/buildbot/OWNERS1
-rwxr-xr-xbuild/android/gyp/copy_ex.py55
-rwxr-xr-xbuild/android/gyp/create_device_library_links.py15
-rwxr-xr-xbuild/android/gyp/create_native_libraries_header.py56
-rwxr-xr-xbuild/android/gyp/gcc_preprocess.py8
-rwxr-xr-xbuild/android/gyp/lint.py146
-rwxr-xr-xbuild/android/gyp/pack_arm_relocations.py15
-rwxr-xr-xbuild/android/gyp/proguard.py20
-rwxr-xr-xbuild/android/gyp/push_libraries.py15
-rwxr-xr-xbuild/android/gyp/strip_library_for_device.py17
-rwxr-xr-xbuild/android/gyp/write_build_config.py32
-rwxr-xr-xbuild/android/gyp/write_ordered_libraries.py43
-rw-r--r--build/android/pack_arm_relocations.gypi4
-rwxr-xr-xbuild/android/provision_devices.py242
-rw-r--r--build/android/push_libraries.gypi2
-rw-r--r--build/android/pylib/android_commands.py2
-rw-r--r--build/android/pylib/device/device_blacklist.py31
-rw-r--r--build/android/pylib/device_settings.py121
-rw-r--r--build/android/pylib/gtest/gtest_config.py1
-rw-r--r--build/android/pylib/utils/parallelizer.py46
-rw-r--r--build/android/strip_native_libraries.gypi2
-rw-r--r--build/common.gypi60
-rw-r--r--build/config/android/config.gni9
-rw-r--r--build/config/android/internal_rules.gni271
-rw-r--r--build/config/android/rules.gni180
-rw-r--r--build/grit_target.gypi5
-rwxr-xr-xbuild/gyp_chromium7
-rwxr-xr-xbuild/install-build-deps-android.sh5
-rwxr-xr-xbuild/install-build-deps.sh238
-rw-r--r--build/java_apk.gypi29
-rwxr-xr-xbuild/landmines.py2
-rwxr-xr-xbuild/linux/install-arm-sysroot.py29
-rw-r--r--build/linux/system.gyp47
-rw-r--r--build/sanitizers/OWNERS2
-rw-r--r--build/sanitizers/sanitizer_options.cc119
-rw-r--r--build/sanitizers/sanitizers.gyp52
-rw-r--r--build/sanitizers/tsan_suppressions.cc308
-rw-r--r--build/secondary/third_party/android_tools/BUILD.gn46
-rw-r--r--build/secondary/tools/grit/grit_rule.gni6
-rw-r--r--build/secondary/ui/BUILD.gn11
-rw-r--r--build/toolchain/android/BUILD.gn11
-rw-r--r--build/toolchain/gcc_toolchain.gni120
-rw-r--r--build/toolchain/mac/BUILD.gn150
-rw-r--r--build/toolchain/win/BUILD.gn267
-rw-r--r--build/util/LASTCHANGE2
-rw-r--r--build/util/LASTCHANGE.blink2
-rwxr-xr-xbuild/util/lastchange.py20
-rw-r--r--build/whitespace_file.txt6
49 files changed, 1933 insertions, 956 deletions
diff --git a/build/all.gyp b/build/all.gyp
index e4448b4a7f..e6d2446614 100644
--- a/build/all.gyp
+++ b/build/all.gyp
@@ -79,6 +79,7 @@
'dependencies': [
'../third_party/re2/re2.gyp:re2',
'../chrome/chrome.gyp:*',
+ '../cc/blink/cc_blink_tests.gyp:*',
'../cc/cc_tests.gyp:*',
'../device/bluetooth/bluetooth.gyp:*',
'../device/device_tests.gyp:*',
@@ -276,6 +277,7 @@
'conditions': [
['OS!="ios" and OS!="android"', {
'dependencies': [
+ '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
'../cc/cc_tests.gyp:cc_unittests',
'../chrome/chrome.gyp:browser_tests',
'../chrome/chrome.gyp:chromedriver_tests',
@@ -743,6 +745,7 @@
'../breakpad/breakpad.gyp:minidump_dump#host',
'../breakpad/breakpad.gyp:minidump_stackwalk#host',
'../build/android/tests/multiple_proguards/multiple_proguards.gyp:multiple_proguards_test_apk',
+ '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
'../cc/cc_tests.gyp:cc_perftests_apk',
'../cc/cc_tests.gyp:cc_unittests',
'../chrome/chrome.gyp:unit_tests',
@@ -766,13 +769,13 @@
'../tools/android/android_tools.gyp:android_tools',
'../tools/android/android_tools.gyp:memconsumer',
'../tools/android/findbugs_plugin/findbugs_plugin.gyp:findbugs_plugin_test',
- '../tools/android/heap_profiler/heap_profiler.gyp:heap_profiler_unittests_stripped',
'../ui/events/events.gyp:events_unittests',
'../ui/ui_unittests.gyp:ui_unittests',
# Unit test bundles packaged as an apk.
'../android_webview/android_webview.gyp:android_webview_test_apk',
'../android_webview/android_webview.gyp:android_webview_unittests_apk',
'../base/base.gyp:base_unittests_apk',
+ '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests_apk',
'../cc/cc_tests.gyp:cc_unittests_apk',
'../chrome/chrome.gyp:chrome_shell_test_apk',
'../chrome/chrome.gyp:chrome_shell_uiautomator_tests',
@@ -790,6 +793,7 @@
'../sandbox/sandbox.gyp:sandbox_linux_jni_unittests_apk',
'../sql/sql.gyp:sql_unittests_apk',
'../sync/sync.gyp:sync_unit_tests_apk',
+ '../tools/android/heap_profiler/heap_profiler.gyp:heap_profiler_unittests_apk',
'../ui/events/events.gyp:events_unittests_apk',
'../ui/gfx/gfx_tests.gyp:gfx_unittests_apk',
'../ui/ui_unittests.gyp:ui_unittests_apk',
@@ -845,6 +849,7 @@
'target_name': 'chromium_builder_dbg',
'type': 'none',
'dependencies': [
+ '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
'../cc/cc_tests.gyp:cc_unittests',
'../chrome/chrome.gyp:browser_tests',
'../chrome/chrome.gyp:interactive_ui_tests',
@@ -882,6 +887,7 @@
'target_name': 'chromium_builder_rel',
'type': 'none',
'dependencies': [
+ '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
'../cc/cc_tests.gyp:cc_unittests',
'../chrome/chrome.gyp:browser_tests',
'../chrome/chrome.gyp:performance_browser_tests',
@@ -972,6 +978,7 @@
'target_name': 'chromium_builder',
'type': 'none',
'dependencies': [
+ '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
'../cc/cc_tests.gyp:cc_unittests',
'../chrome/chrome.gyp:browser_tests',
'../chrome/chrome.gyp:crash_service',
@@ -1064,6 +1071,7 @@
'../ash/ash.gyp:ash_shell_unittests',
'../ash/ash.gyp:ash_unittests',
'../base/base.gyp:base_unittests',
+ '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
'../cc/cc_tests.gyp:cc_unittests',
'../chrome/chrome.gyp:browser_tests',
'../chrome/chrome.gyp:chrome_app_unittests',
@@ -1177,6 +1185,7 @@
'target_name': 'aura_builder',
'type': 'none',
'dependencies': [
+ '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
'../cc/cc_tests.gyp:cc_unittests',
'../chrome/chrome.gyp:browser_tests',
'../chrome/chrome.gyp:chrome',
diff --git a/build/android/buildbot/OWNERS b/build/android/buildbot/OWNERS
index eb93a68bd8..425f1d9a01 100644
--- a/build/android/buildbot/OWNERS
+++ b/build/android/buildbot/OWNERS
@@ -4,7 +4,6 @@ cmp@chromium.org
craigdh@chromium.org
frankf@chromium.org
navabi@chromium.org
-yfriedman@chromium.org
# backup
ilevy@chromium.org
diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000000..088880ce48
--- /dev/null
+++ b/build/android/gyp/copy_ex.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
+import optparse
+import shutil
+import sys
+
+from util import build_utils
+
+
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+
+ parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
+
+ parser.add_option('--dest', help='Directory to copy files to.')
+ parser.add_option('--files', action='append',
+ help='List of files to copy.')
+ parser.add_option('--clear', action='store_true',
+ help='If set, the destination directory will be deleted '
+ 'before copying files to it. This is highly recommended to '
+ 'ensure that no stale files are left in the directory.')
+ parser.add_option('--stamp', help='Path to touch on success.')
+
+ options, _ = parser.parse_args(args)
+
+ if options.clear:
+ build_utils.DeleteDirectory(options.dest)
+ build_utils.MakeDirectory(options.dest)
+
+ files = []
+ for file_arg in options.files:
+ files += build_utils.ParseGypList(file_arg)
+
+ for f in files:
+ shutil.copy(f, options.dest)
+
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile,
+ options.files + build_utils.GetPythonDependencies())
+
+ if options.stamp:
+ build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
+
diff --git a/build/android/gyp/create_device_library_links.py b/build/android/gyp/create_device_library_links.py
index d5828f4c3b..30e050c00f 100755
--- a/build/android/gyp/create_device_library_links.py
+++ b/build/android/gyp/create_device_library_links.py
@@ -34,7 +34,7 @@ def RunShellCommand(device, cmd):
def CreateSymlinkScript(options):
- libraries = build_utils.ReadJson(options.libraries_json)
+ libraries = build_utils.ParseGypList(options.libraries)
link_cmd = (
'rm $APK_LIBRARIES_DIR/%(lib_basename)s > /dev/null 2>&1 \n'
@@ -78,15 +78,16 @@ def TriggerSymlinkScript(options):
RunShellCommand(device, trigger_cmd)
-def main():
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
parser = optparse.OptionParser()
parser.add_option('--apk', help='Path to the apk.')
parser.add_option('--script-host-path',
help='Path on the host for the symlink script.')
parser.add_option('--script-device-path',
help='Path on the device to push the created symlink script.')
- parser.add_option('--libraries-json',
- help='Path to the json list of native libraries.')
+ parser.add_option('--libraries',
+ help='List of native libraries.')
parser.add_option('--target-dir',
help='Device directory that contains the target libraries for symlinks.')
parser.add_option('--stamp', help='Path to touch on success.')
@@ -94,9 +95,9 @@ def main():
help='Path to build device configuration.')
parser.add_option('--configuration-name',
help='The build CONFIGURATION_NAME')
- options, _ = parser.parse_args()
+ options, _ = parser.parse_args(args)
- required_options = ['apk', 'libraries_json', 'script_host_path',
+ required_options = ['apk', 'libraries', 'script_host_path',
'script_device_path', 'target_dir', 'configuration_name']
build_utils.CheckOptions(options, parser, required=required_options)
constants.SetBuildType(options.configuration_name)
@@ -109,4 +110,4 @@ def main():
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_native_libraries_header.py b/build/android/gyp/create_native_libraries_header.py
deleted file mode 100755
index bb91a2a1f5..0000000000
--- a/build/android/gyp/create_native_libraries_header.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Writes .h files for NativeLibraries.template
-
-The native library list header should contain the list of native libraries to
-load in the form:
- = { "lib1", "lib2" }
-The version header should contain a version name string of the form
- = "version_name"
-"""
-
-import json
-import optparse
-import os
-import sys
-
-from util import build_utils
-
-
-def main():
- parser = optparse.OptionParser()
-
- parser.add_option('--native-library-list',
- help='Path to generated .java file containing library list')
- parser.add_option('--version-output',
- help='Path to generated .java file containing version name')
- parser.add_option('--ordered-libraries',
- help='Path to json file containing list of ordered libraries')
- parser.add_option('--version-name',
- help='expected version name of native library')
-
- # args should be the list of libraries in dependency order.
- options, _ = parser.parse_args()
-
- build_utils.MakeDirectory(os.path.dirname(options.native_library_list))
-
- with open(options.ordered_libraries, 'r') as libfile:
- libraries = json.load(libfile)
- # Generates string of the form '= { "base", "net",
- # "content_shell_content_view" }' from a list of the form ["libbase.so",
- # libnet.so", "libcontent_shell_content_view.so"]
- libraries = ['"' + lib[3:-3] + '"' for lib in libraries]
- array = '= { ' + ', '.join(libraries) + '}'
-
- with open(options.native_library_list, 'w') as header:
- header.write(array)
-
- with open(options.version_output, 'w') as header:
- header.write('= "%s"' % options.version_name)
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/build/android/gyp/gcc_preprocess.py b/build/android/gyp/gcc_preprocess.py
index b0c5da2812..03becf918f 100755
--- a/build/android/gyp/gcc_preprocess.py
+++ b/build/android/gyp/gcc_preprocess.py
@@ -29,7 +29,9 @@ def DoGcc(options):
build_utils.CheckOutput(gcc_cmd)
-def main():
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
+
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
@@ -39,7 +41,7 @@ def main():
parser.add_option('--stamp', help='Path to touch on success.')
parser.add_option('--defines', help='Pre-defines macros', action='append')
- options, _ = parser.parse_args()
+ options, _ = parser.parse_args(args)
DoGcc(options)
@@ -53,4 +55,4 @@ def main():
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/lint.py b/build/android/gyp/lint.py
index 48ab837e71..c8aef922ad 100755
--- a/build/android/gyp/lint.py
+++ b/build/android/gyp/lint.py
@@ -20,7 +20,7 @@ _SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
- result_path, product_dir, src_dirs, jar_path):
+ result_path, product_dir, sources, jar_path):
def _RelativizePath(path):
"""Returns relative path to top-level src dir.
@@ -71,62 +71,89 @@ def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
print >> sys.stderr, error_line
return len(issues)
- _ProcessConfigFile()
-
- cmd = [
- lint_path, '-Werror', '--exitcode', '--showall',
- '--config', _RelativizePath(processed_config_path),
- '--classpath', _RelativizePath(jar_path),
- '--xml', _RelativizePath(result_path),
- ]
- for src in src_dirs:
- cmd.extend(['--sources', _RelativizePath(src)])
- cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))
-
- if os.path.exists(result_path):
- os.remove(result_path)
-
- try:
- build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
- except build_utils.CalledProcessError as e:
- # There is a problem with lint usage
- if not os.path.exists(result_path):
- print 'Something is wrong:'
- print e
- return 0
-
- # There are actual lint issues
- else:
- try:
- num_issues = _ParseAndShowResultFile()
- except Exception:
- print 'Lint created unparseable xml file...'
- print 'File contents:'
- with open(result_path) as f:
- print f.read()
+ with build_utils.TempDir() as temp_dir:
+ _ProcessConfigFile()
+
+ cmd = [
+ _RelativizePath(lint_path), '-Werror', '--exitcode', '--showall',
+ '--config', _RelativizePath(processed_config_path),
+ '--classpath', _RelativizePath(jar_path),
+ '--xml', _RelativizePath(result_path),
+ ]
+
+ # There may be multiple source files with the same basename (but in
+ # different directories). It is difficult to determine what part of the path
+ # corresponds to the java package, and so instead just link the source files
+ # into temporary directories (creating a new one whenever there is a name
+ # conflict).
+ src_dirs = []
+ def NewSourceDir():
+ new_dir = os.path.join(temp_dir, str(len(src_dirs)))
+ os.mkdir(new_dir)
+ src_dirs.append(new_dir)
+ cmd.extend(['--sources', _RelativizePath(new_dir)])
+ return new_dir
+
+ def PathInDir(d, src):
+ return os.path.join(d, os.path.basename(src))
+
+ for src in sources:
+ src_dir = None
+ for d in src_dirs:
+ if not os.path.exists(PathInDir(d, src)):
+ src_dir = d
+ break
+ if not src_dir:
+ src_dir = NewSourceDir()
+ os.symlink(os.path.abspath(src), PathInDir(src_dir, src))
+
+ cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))
+
+ if os.path.exists(result_path):
+ os.remove(result_path)
+
+ try:
+ build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
+ except build_utils.CalledProcessError as e:
+ # There is a problem with lint usage
+ if not os.path.exists(result_path):
+ print 'Something is wrong:'
+ print e
return 0
- _ProcessResultFile()
- msg = ('\nLint found %d new issues.\n'
- ' - For full explanation refer to %s\n'
- ' - Wanna suppress these issues?\n'
- ' 1. Read comment in %s\n'
- ' 2. Run "python %s %s"\n' %
- (num_issues,
- _RelativizePath(result_path),
- _RelativizePath(config_path),
- _RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android',
- 'lint', 'suppress.py')),
- _RelativizePath(result_path)))
- print >> sys.stderr, msg
- # Lint errors do not fail the build.
- return 0
+ # There are actual lint issues
+ else:
+ try:
+ num_issues = _ParseAndShowResultFile()
+ except Exception:
+ print 'Lint created unparseable xml file...'
+ print 'File contents:'
+ with open(result_path) as f:
+ print f.read()
+ return 0
+
+ _ProcessResultFile()
+ msg = ('\nLint found %d new issues.\n'
+ ' - For full explanation refer to %s\n'
+ ' - Wanna suppress these issues?\n'
+ ' 1. Read comment in %s\n'
+ ' 2. Run "python %s %s"\n' %
+ (num_issues,
+ _RelativizePath(result_path),
+ _RelativizePath(config_path),
+ _RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android',
+ 'lint', 'suppress.py')),
+ _RelativizePath(result_path)))
+ print >> sys.stderr, msg
+ # Lint errors do not fail the build.
+ return 0
return 0
def main():
parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
parser.add_option('--lint-path', help='Path to lint executable.')
parser.add_option('--config-path', help='Path to lint suppressions file.')
parser.add_option('--processed-config-path',
@@ -135,6 +162,7 @@ def main():
parser.add_option('--result-path', help='Path to XML lint result file.')
parser.add_option('--product-dir', help='Path to product dir.')
parser.add_option('--src-dirs', help='Directories containing java files.')
+ parser.add_option('--java-files', help='Paths to java files.')
parser.add_option('--jar-path', help='Jar file containing class files.')
parser.add_option('--stamp', help='Path to touch on success.')
parser.add_option('--enable', action='store_true',
@@ -145,18 +173,30 @@ def main():
build_utils.CheckOptions(
options, parser, required=['lint_path', 'config_path',
'processed_config_path', 'manifest_path',
- 'result_path', 'product_dir', 'src_dirs',
+ 'result_path', 'product_dir',
'jar_path'])
- src_dirs = build_utils.ParseGypList(options.src_dirs)
-
rc = 0
if options.enable:
+ sources = []
+ if options.src_dirs:
+ src_dirs = build_utils.ParseGypList(options.src_dirs)
+ sources = build_utils.FindInDirectories(src_dirs, '*.java')
+ elif options.java_files:
+ sources = build_utils.ParseGypList(options.java_files)
+ else:
+ print 'One of --src-dirs or --java-files must be specified.'
+ return 1
rc = _RunLint(options.lint_path, options.config_path,
options.processed_config_path,
options.manifest_path, options.result_path,
- options.product_dir, src_dirs, options.jar_path)
+ options.product_dir, sources, options.jar_path)
+
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile,
+ build_utils.GetPythonDependencies())
if options.stamp and not rc:
build_utils.Touch(options.stamp)
diff --git a/build/android/gyp/pack_arm_relocations.py b/build/android/gyp/pack_arm_relocations.py
index 54d63d7a0b..d650927ac8 100755
--- a/build/android/gyp/pack_arm_relocations.py
+++ b/build/android/gyp/pack_arm_relocations.py
@@ -21,7 +21,6 @@ irrespective of any --enable-packing setting. Typically this would be
'libchromium_android_linker.so'.
"""
-import json
import optparse
import os
import shlex
@@ -59,7 +58,8 @@ def CopyArmLibraryUnchanged(library_path, output_path):
shutil.copy(library_path, output_path)
-def main():
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
parser = optparse.OptionParser()
parser.add_option('--configuration-name',
@@ -80,17 +80,16 @@ def main():
help='Directory for stripped libraries')
parser.add_option('--packed-libraries-dir',
help='Directory for packed libraries')
- parser.add_option('--libraries-file',
- help='Path to json file containing list of libraries')
+ parser.add_option('--libraries',
+ help='List of libraries')
parser.add_option('--stamp', help='Path to touch on success')
- options, _ = parser.parse_args()
+ options, _ = parser.parse_args(args)
enable_packing = (options.enable_packing == '1' and
options.configuration_name == 'Release')
exclude_packing_set = set(shlex.split(options.exclude_packing_list))
- with open(options.libraries_file, 'r') as libfile:
- libraries = json.load(libfile)
+ libraries = build_utils.ParseGypList(options.libraries)
build_utils.MakeDirectory(options.packed_libraries_dir)
@@ -113,4 +112,4 @@ def main():
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py
index b27365b7fc..ca5877051b 100755
--- a/build/android/gyp/proguard.py
+++ b/build/android/gyp/proguard.py
@@ -13,7 +13,9 @@ from util import build_utils
def DoProguard(options):
injars = options.input_path
outjars = options.output_path
- classpath = build_utils.ParseGypList(options.classpath)
+ classpath = []
+ for arg in options.classpath:
+ classpath += build_utils.ParseGypList(arg)
classpath = list(set(classpath))
libraryjars = ':'.join(classpath)
# proguard does its own dependency checking, which can be avoided by deleting
@@ -29,8 +31,10 @@ def DoProguard(options):
build_utils.CheckOutput(proguard_cmd, print_stdout=True)
-def main():
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
parser.add_option('--proguard-path',
help='Path to the proguard executable.')
parser.add_option('--input-path',
@@ -38,16 +42,22 @@ def main():
parser.add_option('--output-path', help='Path to the generated .jar file.')
parser.add_option('--proguard-config',
help='Path to the proguard configuration file.')
- parser.add_option('--classpath', help="Classpath for proguard.")
+ parser.add_option('--classpath', action='append',
+ help="Classpath for proguard.")
parser.add_option('--stamp', help='Path to touch on success.')
- options, _ = parser.parse_args()
+ options, _ = parser.parse_args(args)
DoProguard(options)
+ if options.depfile:
+ build_utils.WriteDepfile(
+ options.depfile,
+ build_utils.GetPythonDependencies())
+
if options.stamp:
build_utils.Touch(options.stamp)
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/push_libraries.py b/build/android/gyp/push_libraries.py
index efb9dfcc6f..63421e9a02 100755
--- a/build/android/gyp/push_libraries.py
+++ b/build/android/gyp/push_libraries.py
@@ -22,7 +22,7 @@ from util import build_utils
from util import md5_check
def DoPush(options):
- libraries = build_utils.ReadJson(options.libraries_json)
+ libraries = build_utils.ParseGypList(options.libraries)
device = build_device.GetBuildDeviceFromPath(
options.build_device_configuration)
@@ -50,22 +50,23 @@ def DoPush(options):
input_strings=[device_path])
-def main():
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
parser = optparse.OptionParser()
parser.add_option('--libraries-dir',
help='Directory that contains stripped libraries.')
parser.add_option('--device-dir',
help='Device directory to push the libraries to.')
- parser.add_option('--libraries-json',
- help='Path to the json list of native libraries.')
+ parser.add_option('--libraries',
+ help='List of native libraries.')
parser.add_option('--stamp', help='Path to touch on success.')
parser.add_option('--build-device-configuration',
help='Path to build device configuration.')
parser.add_option('--configuration-name',
help='The build CONFIGURATION_NAME')
- options, _ = parser.parse_args()
+ options, _ = parser.parse_args(args)
- required_options = ['libraries_dir', 'device_dir', 'libraries_json']
+ required_options = ['libraries', 'device_dir', 'libraries']
build_utils.CheckOptions(options, parser, required=required_options)
constants.SetBuildType(options.configuration_name)
@@ -76,4 +77,4 @@ def main():
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/strip_library_for_device.py b/build/android/gyp/strip_library_for_device.py
index ce23993ac2..9e2daae33a 100755
--- a/build/android/gyp/strip_library_for_device.py
+++ b/build/android/gyp/strip_library_for_device.py
@@ -4,7 +4,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import json
import optparse
import os
import sys
@@ -20,9 +19,11 @@ def StripLibrary(android_strip, android_strip_args, library_path, output_path):
build_utils.CheckOutput(strip_cmd)
+def main(args):
+ args = build_utils.ExpandFileArgs(args)
-def main():
parser = optparse.OptionParser()
+ build_utils.AddDepfileOption(parser)
parser.add_option('--android-strip',
help='Path to the toolchain\'s strip binary')
@@ -32,15 +33,13 @@ def main():
help='Directory for un-stripped libraries')
parser.add_option('--stripped-libraries-dir',
help='Directory for stripped libraries')
- parser.add_option('--libraries-file',
- help='Path to json file containing list of libraries')
+ parser.add_option('--libraries',
+ help='List of libraries to strip')
parser.add_option('--stamp', help='Path to touch on success')
+ options, _ = parser.parse_args(args)
- options, _ = parser.parse_args()
-
- with open(options.libraries_file, 'r') as libfile:
- libraries = json.load(libfile)
+ libraries = build_utils.ParseGypList(options.libraries)
build_utils.MakeDirectory(options.stripped_libraries_dir)
@@ -59,4 +58,4 @@ def main():
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py
index b836313856..04437b6047 100755
--- a/build/android/gyp/write_build_config.py
+++ b/build/android/gyp/write_build_config.py
@@ -36,6 +36,9 @@ import sys
from util import build_utils
+import write_ordered_libraries
+
+
dep_config_cache = {}
def GetDepConfig(path):
if not path in dep_config_cache:
@@ -74,6 +77,10 @@ def main(argv):
parser.add_option('--jar-path', help='Path to target\'s jar output.')
parser.add_option('--dex-path', help='Path to target\'s dex output.')
+ # apk native library options
+ parser.add_option('--native-libs', help='List of top-level native libs.')
+ parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
+
options, args = parser.parse_args(argv)
if args:
@@ -91,14 +98,15 @@ def main(argv):
'android_apk': ['jar_path', 'dex_path', 'resources_zip']
}[options.type]
+ if options.native_libs:
+ required_options += ['readelf_path']
+
build_utils.CheckOptions(options, parser, required_options)
possible_deps_config_paths = build_utils.ParseGypList(
options.possible_deps_configs)
-
-
allow_unknown_deps = options.type == 'android_apk'
unknown_deps = [
c for c in possible_deps_config_paths if not os.path.exists(c)]
@@ -157,6 +165,26 @@ def main(argv):
dex_deps_files = [c['dex_path'] for c in all_library_deps]
dex_config['dependency_dex_files'] = dex_deps_files
+ library_paths = []
+ java_libraries_list = []
+ if options.native_libs:
+ libraries = build_utils.ParseGypList(options.native_libs)
+ libraries_dir = os.path.dirname(libraries[0])
+ write_ordered_libraries.SetReadelfPath(options.readelf_path)
+ write_ordered_libraries.SetLibraryDirs([libraries_dir])
+ all_native_library_deps = (
+ write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
+ libraries))
+ java_libraries_list = '{%s}' % ','.join(
+ ['"%s"' % s for s in all_native_library_deps])
+ library_paths = map(
+ write_ordered_libraries.FullLibraryPath, all_native_library_deps)
+
+ config['native'] = {
+ 'libraries': library_paths,
+ 'java_libraries_list': java_libraries_list
+ }
+
build_utils.WriteJson(config, options.build_config, only_if_changed=True)
if options.depfile:
diff --git a/build/android/gyp/write_ordered_libraries.py b/build/android/gyp/write_ordered_libraries.py
index 31aba74cca..954f12daef 100755
--- a/build/android/gyp/write_ordered_libraries.py
+++ b/build/android/gyp/write_ordered_libraries.py
@@ -27,13 +27,26 @@ import sys
from util import build_utils
-_options = None
+_readelf = None
+_library_dirs = None
+
_library_re = re.compile(
'.*NEEDED.*Shared library: \[(?P<library_name>[\w/.]+)\]')
+def SetReadelfPath(path):
+ global _readelf
+ _readelf = path
+
+
+def SetLibraryDirs(dirs):
+ global _library_dirs
+ _library_dirs = dirs
+
+
def FullLibraryPath(library_name):
- for directory in _options.libraries_dir.split(','):
+ assert _library_dirs is not None
+ for directory in _library_dirs:
path = '%s/%s' % (directory, library_name)
if os.path.exists(path):
return path
@@ -47,9 +60,10 @@ def IsSystemLibrary(library_name):
def CallReadElf(library_or_executable):
- readelf_cmd = [_options.readelf,
+ assert _readelf is not None
+ readelf_cmd = [_readelf,
'-d',
- library_or_executable]
+ FullLibraryPath(library_or_executable)]
return build_utils.CheckOutput(readelf_cmd)
@@ -91,17 +105,26 @@ def main():
parser.add_option('--output', help='Path to the generated .json file.')
parser.add_option('--stamp', help='Path to touch on success.')
- global _options
- _options, _ = parser.parse_args()
+ options, _ = parser.parse_args()
+
+ SetReadelfPath(options.readelf)
+ SetLibraryDirs(options.libraries_dir.split(','))
- libraries = build_utils.ParseGypList(_options.input_libraries)
+ libraries = build_utils.ParseGypList(options.input_libraries)
if len(libraries):
libraries = GetSortedTransitiveDependenciesForBinaries(libraries)
- build_utils.WriteJson(libraries, _options.output, only_if_changed=True)
+ # Convert to "base" library names: e.g. libfoo.so -> foo
+ java_libraries_list = (
+ '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))
+
+ build_utils.WriteJson(
+ {'libraries': libraries, 'java_libraries_list': java_libraries_list},
+ options.output,
+ only_if_changed=True)
- if _options.stamp:
- build_utils.Touch(_options.stamp)
+ if options.stamp:
+ build_utils.Touch(options.stamp)
if __name__ == '__main__':
diff --git a/build/android/pack_arm_relocations.gypi b/build/android/pack_arm_relocations.gypi
index cb9a77b426..e982527b4c 100644
--- a/build/android/pack_arm_relocations.gypi
+++ b/build/android/pack_arm_relocations.gypi
@@ -53,7 +53,7 @@
'--android-objcopy=<(android_objcopy)',
'--stripped-libraries-dir=<(stripped_libraries_dir)',
'--packed-libraries-dir=<(packed_libraries_dir)',
- '--libraries-file=<(ordered_libraries_file)',
+ '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
'--stamp=<(stamp)',
],
}, {
@@ -64,7 +64,7 @@
'--enable-packing=0',
'--stripped-libraries-dir=<(stripped_libraries_dir)',
'--packed-libraries-dir=<(packed_libraries_dir)',
- '--libraries-file=<(ordered_libraries_file)',
+ '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
'--stamp=<(stamp)',
],
}],
diff --git a/build/android/provision_devices.py b/build/android/provision_devices.py
index 44ae971876..54c90c3a6e 100755
--- a/build/android/provision_devices.py
+++ b/build/android/provision_devices.py
@@ -24,6 +24,7 @@ from pylib import device_settings
from pylib.device import device_blacklist
from pylib.device import device_errors
from pylib.device import device_utils
+from pylib.utils import run_tests_helper
sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
'third_party', 'android_testrunner'))
@@ -34,8 +35,8 @@ def KillHostHeartbeat():
stdout, _ = ps.communicate()
matches = re.findall('\\n.*host_heartbeat.*', stdout)
for match in matches:
- print 'An instance of host heart beart running... will kill'
- pid = re.findall('(\d+)', match)[0]
+ logging.info('An instance of host heart beart running... will kill')
+ pid = re.findall('(\S+)', match)[1]
subprocess.call(['kill', str(pid)])
@@ -43,47 +44,45 @@ def LaunchHostHeartbeat():
# Kill if existing host_heartbeat
KillHostHeartbeat()
# Launch a new host_heartbeat
- print 'Spawning host heartbeat...'
+ logging.info('Spawning host heartbeat...')
subprocess.Popen([os.path.join(constants.DIR_SOURCE_ROOT,
'build/android/host_heartbeat.py')])
-def PushAndLaunchAdbReboot(devices, target):
+def PushAndLaunchAdbReboot(device, target):
"""Pushes and launches the adb_reboot binary on the device.
Arguments:
- devices: The list of serial numbers of the device to which the
- adb_reboot binary should be pushed.
- target : The build target (example, Debug or Release) which helps in
- locating the adb_reboot binary.
+ device: The DeviceUtils instance for the device to which the adb_reboot
+ binary should be pushed.
+ target: The build target (example, Debug or Release) which helps in
+ locating the adb_reboot binary.
"""
- for device_serial in devices:
- print 'Will push and launch adb_reboot on %s' % device_serial
- device = device_utils.DeviceUtils(device_serial)
- # Kill if adb_reboot is already running.
- try:
- # Don't try to kill adb_reboot more than once. We don't expect it to be
- # running at all.
- device.KillAll('adb_reboot', blocking=True, timeout=2, retries=0)
- except device_errors.CommandFailedError:
- # We can safely ignore the exception because we don't expect adb_reboot
- # to be running.
- pass
- # Push adb_reboot
- print ' Pushing adb_reboot ...'
- adb_reboot = os.path.join(constants.DIR_SOURCE_ROOT,
- 'out/%s/adb_reboot' % target)
- device.PushChangedFiles(adb_reboot, '/data/local/tmp/')
- # Launch adb_reboot
- print ' Launching adb_reboot ...'
- device.old_interface.GetAndroidToolStatusAndOutput(
- '/data/local/tmp/adb_reboot')
- LaunchHostHeartbeat()
+ logging.info('Will push and launch adb_reboot on %s' % str(device))
+ # Kill if adb_reboot is already running.
+ try:
+ # Don't try to kill adb_reboot more than once. We don't expect it to be
+ # running at all.
+ device.KillAll('adb_reboot', blocking=True, timeout=2, retries=0)
+ except device_errors.CommandFailedError:
+ # We can safely ignore the exception because we don't expect adb_reboot
+ # to be running.
+ pass
+ # Push adb_reboot
+ logging.info(' Pushing adb_reboot ...')
+ adb_reboot = os.path.join(constants.DIR_SOURCE_ROOT,
+ 'out/%s/adb_reboot' % target)
+ device.PushChangedFiles(adb_reboot, '/data/local/tmp/')
+ # Launch adb_reboot
+ logging.info(' Launching adb_reboot ...')
+ device.old_interface.GetAndroidToolStatusAndOutput(
+ '/data/local/tmp/adb_reboot')
def _ConfigureLocalProperties(device, is_perf):
"""Set standard readonly testing device properties prior to reboot."""
local_props = [
+ 'persist.sys.usb.config=adb',
'ro.monkey=1',
'ro.test_harness=1',
'ro.audio.silent=1',
@@ -137,125 +136,102 @@ def WipeDeviceData(device):
as_root=True)
-def WipeDevicesIfPossible(devices):
- devices_to_reboot = []
- for device_serial in devices:
- device = device_utils.DeviceUtils(device_serial)
- if not device.old_interface.EnableAdbRoot():
- continue
+def WipeDeviceIfPossible(device):
+ try:
+ device.EnableRoot()
WipeDeviceData(device)
- devices_to_reboot.append(device)
+ # TODO(jbudorick): Tune the timeout per OS version.
+ device.Reboot(True, timeout=600, retries=0)
+ except (errors.DeviceUnresponsiveError, device_errors.CommandFailedError):
+ pass
- if devices_to_reboot:
- try:
- device_utils.DeviceUtils.parallel(devices_to_reboot).Reboot(True)
- except errors.DeviceUnresponsiveError:
- pass
- for device_serial in devices_to_reboot:
- device.WaitUntilFullyBooted(timeout=90)
-
-
-def ProvisionDevice(device_serial, is_perf, disable_location):
- device = device_utils.DeviceUtils(device_serial)
- device.old_interface.EnableAdbRoot()
- _ConfigureLocalProperties(device, is_perf)
- device_settings_map = device_settings.DETERMINISTIC_DEVICE_SETTINGS
- if disable_location:
- device_settings_map.update(device_settings.DISABLE_LOCATION_SETTING)
- else:
- device_settings_map.update(device_settings.ENABLE_LOCATION_SETTING)
- device_settings.ConfigureContentSettingsDict(device, device_settings_map)
- device_settings.SetLockScreenSettings(device)
- if is_perf:
- # TODO(tonyg): We eventually want network on. However, currently radios
- # can cause perfbots to drain faster than they charge.
- device_settings.ConfigureContentSettingsDict(
- device, device_settings.NETWORK_DISABLED_SETTINGS)
- # Some perf bots run benchmarks with USB charging disabled which leads
- # to gradual draining of the battery. We must wait for a full charge
- # before starting a run in order to keep the devices online.
+
+def ProvisionDevice(device, options, is_perf):
+ try:
+ if not options.skip_wipe:
+ WipeDeviceIfPossible(device)
try:
- battery_info = device.old_interface.GetBatteryInfo()
- except Exception as e:
- battery_info = {}
- logging.error('Unable to obtain battery info for %s, %s',
- device_serial, e)
-
- while int(battery_info.get('level', 100)) < 95:
- if not device.old_interface.IsDeviceCharging():
- if device.old_interface.CanControlUsbCharging():
- device.old_interface.EnableUsbCharging()
- else:
- logging.error('Device is not charging')
- break
- logging.info('Waiting for device to charge. Current level=%s',
+ device.EnableRoot()
+ except device_errors.CommandFailedError as e:
+ logging.warning(str(e))
+ _ConfigureLocalProperties(device, is_perf)
+ device_settings.ConfigureContentSettings(
+ device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
+ if options.disable_location:
+ device_settings.ConfigureContentSettings(
+ device, device_settings.DISABLE_LOCATION_SETTINGS)
+ else:
+ device_settings.ConfigureContentSettings(
+ device, device_settings.ENABLE_LOCATION_SETTINGS)
+ device_settings.SetLockScreenSettings(device)
+ if is_perf:
+ # TODO(tonyg): We eventually want network on. However, currently radios
+ # can cause perfbots to drain faster than they charge.
+ device_settings.ConfigureContentSettings(
+ device, device_settings.NETWORK_DISABLED_SETTINGS)
+ # Some perf bots run benchmarks with USB charging disabled which leads
+ # to gradual draining of the battery. We must wait for a full charge
+ # before starting a run in order to keep the devices online.
+ try:
+ battery_info = device.old_interface.GetBatteryInfo()
+ except Exception as e:
+ battery_info = {}
+ logging.error('Unable to obtain battery info for %s, %s',
+ str(device), e)
+
+ while int(battery_info.get('level', 100)) < 95:
+ if not device.old_interface.IsDeviceCharging():
+ if device.old_interface.CanControlUsbCharging():
+ device.old_interface.EnableUsbCharging()
+ else:
+ logging.error('Device is not charging')
+ break
+ logging.info('Waiting for device to charge. Current level=%s',
battery_info.get('level', 0))
- time.sleep(60)
- battery_info = device.old_interface.GetBatteryInfo()
- device.RunShellCommand('date -u %f' % time.time(), as_root=True)
+ time.sleep(60)
+ battery_info = device.old_interface.GetBatteryInfo()
+ device.RunShellCommand('date -u %f' % time.time(), as_root=True)
+ # TODO(jbudorick): Tune the timeout per OS version.
+ device.Reboot(True, timeout=600, retries=0)
+ props = device.RunShellCommand('getprop')
+ for prop in props:
+ logging.info(' %s' % prop)
+ if options.auto_reconnect:
+ PushAndLaunchAdbReboot(device, options.target)
+ except (errors.WaitForResponseTimedOutError,
+ device_errors.CommandTimeoutError):
+ logging.info('Timed out waiting for device %s. Adding to blacklist.',
+ str(device))
+ # Device black list is reset by bb_device_status_check.py per build.
+ device_blacklist.ExtendBlacklist([str(device)])
+ except (device_errors.CommandFailedError):
+ logging.info('Failed to provision device %s. Adding to blacklist.',
+ str(device))
+ device_blacklist.ExtendBlacklist([str(device)])
def ProvisionDevices(options):
is_perf = 'perf' in os.environ.get('BUILDBOT_BUILDERNAME', '').lower()
- # TODO(jbudorick): Parallelize provisioning of all attached devices after
- # switching from AndroidCommands.
if options.device is not None:
devices = [options.device]
else:
devices = android_commands.GetAttachedDevices()
- # Wipe devices (unless --skip-wipe was specified)
- if not options.skip_wipe:
- WipeDevicesIfPossible(devices)
-
- bad_devices = []
- # Provision devices
- for device_serial in devices:
- try:
- ProvisionDevice(device_serial, is_perf, options.disable_location)
- except errors.WaitForResponseTimedOutError:
- logging.info('Timed out waiting for device %s. Adding to blacklist.',
- device_serial)
- bad_devices.append(device_serial)
- # Device black list is reset by bb_device_status_check.py per build.
- device_blacklist.ExtendBlacklist([device_serial])
- devices = [device for device in devices if device not in bad_devices]
-
- # If there are no good devices
- if not devices:
- raise device_errors.NoDevicesError
-
- try:
- device_utils.DeviceUtils.parallel(devices).Reboot(True)
- except errors.DeviceUnresponsiveError:
- pass
-
- bad_devices = []
- for device_serial in devices:
- device = device_utils.DeviceUtils(device_serial)
- try:
- device.WaitUntilFullyBooted(timeout=90)
- (_, prop) = device.old_interface.GetShellCommandStatusAndOutput('getprop')
- for p in prop:
- print p
- except errors.WaitForResponseTimedOutError:
- logging.info('Timed out waiting for device %s. Adding to blacklist.',
- device_serial)
- bad_devices.append(device_serial)
- # Device black list is reset by bb_device_status_check.py per build.
- device_blacklist.ExtendBlacklist([device_serial])
- devices = [device for device in devices if device not in bad_devices]
-
- # If there are no good devices
- if not devices:
- raise device_errors.NoDevicesError
-
+ parallel_devices = device_utils.DeviceUtils.parallel(devices)
+ parallel_devices.pMap(ProvisionDevice, options, is_perf)
if options.auto_reconnect:
- PushAndLaunchAdbReboot(devices, options.target)
+ LaunchHostHeartbeat()
+ blacklist = device_blacklist.ReadBlacklist()
+ if all(d in blacklist for d in devices):
+ raise device_errors.NoDevicesError
+ return 0
def main(argv):
- logging.basicConfig(level=logging.INFO)
+ custom_handler = logging.StreamHandler(sys.stdout)
+ custom_handler.setFormatter(run_tests_helper.CustomFormatter())
+ logging.getLogger().addHandler(custom_handler)
+ logging.getLogger().setLevel(logging.INFO)
parser = optparse.OptionParser()
parser.add_option('--skip-wipe', action='store_true', default=False,
@@ -275,7 +251,7 @@ def main(argv):
print >> sys.stderr, 'Unused args %s' % args
return 1
- ProvisionDevices(options)
+ return ProvisionDevices(options)
if __name__ == '__main__':
diff --git a/build/android/push_libraries.gypi b/build/android/push_libraries.gypi
index f5f557087f..d74fb21999 100644
--- a/build/android/push_libraries.gypi
+++ b/build/android/push_libraries.gypi
@@ -41,7 +41,7 @@
'--build-device-configuration=<(build_device_config_path)',
'--libraries-dir=<(libraries_source_dir)',
'--device-dir=<(device_library_dir)',
- '--libraries-json=<(ordered_libraries_file)',
+ '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
'--stamp=<(push_stamp)',
'--configuration-name=<(configuration_name)',
],
diff --git a/build/android/pylib/android_commands.py b/build/android/pylib/android_commands.py
index 5fc2fbd0b0..7c73b526da 100644
--- a/build/android/pylib/android_commands.py
+++ b/build/android/pylib/android_commands.py
@@ -713,7 +713,7 @@ class AndroidCommands(object):
"""
self._LogShell(command)
if "'" in command:
- logging.warning(command + " contains ' quotes")
+ command = command.replace('\'', '\'\\\'\'')
result = self._adb.SendShellCommand(
"'%s'" % command, timeout_time).splitlines()
# TODO(b.kelemen): we should really be able to drop the stderr of the
diff --git a/build/android/pylib/device/device_blacklist.py b/build/android/pylib/device/device_blacklist.py
index b2124a74fa..a141d62b81 100644
--- a/build/android/pylib/device/device_blacklist.py
+++ b/build/android/pylib/device/device_blacklist.py
@@ -4,6 +4,7 @@
import json
import os
+import threading
from pylib import constants
_BLACKLIST_JSON = os.path.join(
@@ -11,17 +12,22 @@ _BLACKLIST_JSON = os.path.join(
os.environ.get('CHROMIUM_OUT_DIR', 'out'),
'bad_devices.json')
+# Note that this only protects against concurrent accesses to the blacklist
+# within a process.
+_blacklist_lock = threading.RLock()
+
def ReadBlacklist():
"""Reads the blacklist from the _BLACKLIST_JSON file.
Returns:
A list containing bad devices.
"""
- if not os.path.exists(_BLACKLIST_JSON):
- return []
+ with _blacklist_lock:
+ if not os.path.exists(_BLACKLIST_JSON):
+ return []
- with open(_BLACKLIST_JSON, 'r') as f:
- return json.load(f)
+ with open(_BLACKLIST_JSON, 'r') as f:
+ return json.load(f)
def WriteBlacklist(blacklist):
@@ -30,8 +36,9 @@ def WriteBlacklist(blacklist):
Args:
blacklist: list of bad devices to write to the _BLACKLIST_JSON file.
"""
- with open(_BLACKLIST_JSON, 'w') as f:
- json.dump(list(set(blacklist)), f)
+ with _blacklist_lock:
+ with open(_BLACKLIST_JSON, 'w') as f:
+ json.dump(list(set(blacklist)), f)
def ExtendBlacklist(devices):
@@ -40,13 +47,15 @@ def ExtendBlacklist(devices):
Args:
devices: list of bad devices to be added to the _BLACKLIST_JSON file.
"""
- blacklist = ReadBlacklist()
- blacklist.extend(devices)
- WriteBlacklist(blacklist)
+ with _blacklist_lock:
+ blacklist = ReadBlacklist()
+ blacklist.extend(devices)
+ WriteBlacklist(blacklist)
def ResetBlacklist():
"""Erases the _BLACKLIST_JSON file if it exists."""
- if os.path.exists(_BLACKLIST_JSON):
- os.remove(_BLACKLIST_JSON)
+ with _blacklist_lock:
+ if os.path.exists(_BLACKLIST_JSON):
+ os.remove(_BLACKLIST_JSON)
diff --git a/build/android/pylib/device_settings.py b/build/android/pylib/device_settings.py
index 3612a81f66..bc39b5d3aa 100644
--- a/build/android/pylib/device_settings.py
+++ b/build/android/pylib/device_settings.py
@@ -10,8 +10,8 @@ _LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
PASSWORD_QUALITY_UNSPECIFIED = '0'
-def ConfigureContentSettingsDict(device, desired_settings):
- """Configures device content setings from a dictionary.
+def ConfigureContentSettings(device, desired_settings):
+  """Configures device content settings from a list.
Many settings are documented at:
http://developer.android.com/reference/android/provider/Settings.Global.html
@@ -22,7 +22,7 @@ def ConfigureContentSettingsDict(device, desired_settings):
Args:
device: A DeviceUtils instance for the device to configure.
- desired_settings: A dict of {table: {key: value}} for all
+    desired_settings: A list of (table, [(key, value), ...]) for all
settings to configure.
"""
try:
@@ -36,13 +36,10 @@ def ConfigureContentSettingsDict(device, desired_settings):
logging.error('Skipping content settings configuration due to outdated sdk')
return
- device.SetProp('persist.sys.usb.config', 'adb')
- device.old_interface.WaitForDevicePm()
-
if device.GetProp('ro.build.type') == 'userdebug':
- for table, key_value in sorted(desired_settings.iteritems()):
+ for table, key_value in desired_settings:
settings = content_settings.ContentSettings(table, device)
- for key, value in key_value.iteritems():
+ for key, value in key_value:
settings[key] = value
logging.info('\n%s %s', table, (80 - len(table)) * '-')
for key, value in sorted(settings.iteritems()):
@@ -93,84 +90,90 @@ commit transaction;""" % {
'columns': ', '.join(columns),
'values': ', '.join(["'%s'" % value for value in values])
}
- output_msg = device.RunShellCommand('\'sqlite3 %s "%s"\'' % (db, cmd))
+ output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd))
if output_msg:
print ' '.join(output_msg)
-ENABLE_LOCATION_SETTING = {
- 'settings/secure': {
+ENABLE_LOCATION_SETTINGS = [
+ # Note that setting these in this order is required in order for all of
+ # them to take and stick through a reboot.
+ ('com.google.settings/partner', [
+ ('use_location_for_services', 1),
+ ]),
+ ('settings/secure', [
# Ensure Geolocation is enabled and allowed for tests.
- 'location_providers_allowed': 'gps,network',
- }
-}
-
-DISABLE_LOCATION_SETTING = {
- 'settings/secure': {
+ ('location_providers_allowed', 'gps,network'),
+ ]),
+ ('com.google.settings/partner', [
+ ('network_location_opt_in', 1),
+ ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+ ('com.google.settings/partner', [
+ ('use_location_for_services', 0),
+ ]),
+ ('settings/secure', [
# Ensure Geolocation is disabled.
- 'location_providers_allowed': '',
- }
-}
-
-DETERMINISTIC_DEVICE_SETTINGS = {
- 'com.google.settings/partner': {
- 'network_location_opt_in': 0,
- 'use_location_for_services': 1,
- },
- 'settings/global': {
- 'assisted_gps_enabled': 0,
+ ('location_providers_allowed', ''),
+ ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+ ('settings/global', [
+ ('assisted_gps_enabled', 0),
# Disable "auto time" and "auto time zone" to avoid network-provided time
# to overwrite the device's datetime and timezone synchronized from host
# when running tests later. See b/6569849.
- 'auto_time': 0,
- 'auto_time_zone': 0,
+ ('auto_time', 0),
+ ('auto_time_zone', 0),
- 'development_settings_enabled': 1,
+ ('development_settings_enabled', 1),
# Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
# on application crashes and ANRs. If this is disabled, the crash/ANR dialog
# will never display the "Report" button.
# Type: int ( 0 = disallow, 1 = allow )
- 'send_action_app_error': 0,
+ ('send_action_app_error', 0),
- 'stay_on_while_plugged_in': 3,
+ ('stay_on_while_plugged_in', 3),
- 'verifier_verify_adb_installs' : 0,
- },
- 'settings/secure': {
- 'allowed_geolocation_origins':
- 'http://www.google.co.uk http://www.google.com',
+ ('verifier_verify_adb_installs', 0),
+ ]),
+ ('settings/secure', [
+ ('allowed_geolocation_origins',
+ 'http://www.google.co.uk http://www.google.com'),
# Ensure that we never get random dialogs like "Unfortunately the process
# android.process.acore has stopped", which steal the focus, and make our
# automation fail (because the dialog steals the focus then mistakenly
# receives the injected user input events).
- 'anr_show_background': 0,
+ ('anr_show_background', 0),
- 'lockscreen.disabled': 1,
+ ('lockscreen.disabled', 1),
- 'screensaver_enabled': 0,
- },
- 'settings/system': {
+ ('screensaver_enabled', 0),
+ ]),
+ ('settings/system', [
# Don't want devices to accidentally rotate the screen as that could
# affect performance measurements.
- 'accelerometer_rotation': 0,
+ ('accelerometer_rotation', 0),
- 'lockscreen.disabled': 1,
+ ('lockscreen.disabled', 1),
# Turn down brightness and disable auto-adjust so that devices run cooler.
- 'screen_brightness': 5,
- 'screen_brightness_mode': 0,
-
- 'user_rotation': 0,
- },
-}
-
-
-NETWORK_DISABLED_SETTINGS = {
- 'settings/global': {
- 'airplane_mode_on': 1,
- 'wifi_on': 0,
- },
-}
+ ('screen_brightness', 5),
+ ('screen_brightness_mode', 0),
+
+ ('user_rotation', 0),
+ ]),
+]
+
+NETWORK_DISABLED_SETTINGS = [
+ ('settings/global', [
+ ('airplane_mode_on', 1),
+ ('wifi_on', 0),
+ ]),
+]
diff --git a/build/android/pylib/gtest/gtest_config.py b/build/android/pylib/gtest/gtest_config.py
index ebff3bc0aa..ce3aef4170 100644
--- a/build/android/pylib/gtest/gtest_config.py
+++ b/build/android/pylib/gtest/gtest_config.py
@@ -7,6 +7,7 @@
# Add new suites here before upgrading them to the stable list below.
EXPERIMENTAL_TEST_SUITES = [
'content_gl_tests',
+ 'heap_profiler_unittests',
]
# Do not modify this list without approval of an android owner.
diff --git a/build/android/pylib/utils/parallelizer.py b/build/android/pylib/utils/parallelizer.py
index 761455223c..9323c21ad4 100644
--- a/build/android/pylib/utils/parallelizer.py
+++ b/build/android/pylib/utils/parallelizer.py
@@ -155,6 +155,32 @@ class Parallelizer(object):
self.pFinish(timeout)
return self._objs
+ def pMap(self, f, *args, **kwargs):
+ """Map a function across the current wrapped objects in parallel.
+
+ This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.
+
+ Note that this call is asynchronous. Call pFinish on the return value to
+ block until the call finishes.
+
+ Args:
+ f: The function to call.
+ args: The positional args to pass to f.
+ kwargs: The keyword args to pass to f.
+ Returns:
+ A Parallelizer wrapping the ReraiserThreadGroup running the map in
+ parallel.
+ """
+ self._assertNoShadow('pMap')
+ r = type(self)(self._orig_objs)
+ r._objs = reraiser_thread.ReraiserThreadGroup(
+ [reraiser_thread.ReraiserThread(
+ f, args=tuple([o] + list(args)), kwargs=kwargs,
+ name='%s(%s)' % (f.__name__, d))
+ for d, o in zip(self._orig_objs, self._objs)])
+ r._objs.StartAll() # pylint: disable=W0212
+ return r
+
def _assertNoShadow(self, attr_name):
"""Ensures that |attr_name| isn't shadowing part of the wrapped obejcts.
@@ -194,3 +220,23 @@ class SyncParallelizer(Parallelizer):
r.pFinish(None)
return r
+ #override
+ def pMap(self, f, *args, **kwargs):
+ """Map a function across the current wrapped objects in parallel.
+
+ This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.
+
+ Note that this call is synchronous.
+
+ Args:
+ f: The function to call.
+ args: The positional args to pass to f.
+ kwargs: The keyword args to pass to f.
+ Returns:
+ A Parallelizer wrapping the ReraiserThreadGroup running the map in
+ parallel.
+ """
+ r = super(SyncParallelizer, self).pMap(f, *args, **kwargs)
+ r.pFinish(None)
+ return r
+
diff --git a/build/android/strip_native_libraries.gypi b/build/android/strip_native_libraries.gypi
index 40c00cb82f..bdffcfd049 100644
--- a/build/android/strip_native_libraries.gypi
+++ b/build/android/strip_native_libraries.gypi
@@ -48,7 +48,7 @@
'--android-strip-arg=--strip-unneeded',
'--stripped-libraries-dir=<(stripped_libraries_dir)',
'--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)',
- '--libraries-file=<(ordered_libraries_file)',
+ '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
'--stamp=<(stamp)',
],
}
diff --git a/build/common.gypi b/build/common.gypi
index 609041c5d2..a82d24dd22 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -2936,7 +2936,8 @@
['<(chromeos)==1 and >(nacl_untrusted_build)==0', {
'defines': ['OS_CHROMEOS=1'],
}],
- ['enable_wexit_time_destructors==1', {
+ ['enable_wexit_time_destructors==1 and OS!="win"', {
+ # TODO: Enable on Windows too, http://crbug.com/404525
'variables': { 'clang_warning_flags': ['-Wexit-time-destructors']},
}],
['chromium_code==0', {
@@ -3230,6 +3231,30 @@
],
},
}],
+ ['clang==1', {
+ 'cflags': [
+ # Allow comparing the address of references and 'this' against 0
+ # in debug builds. Technically, these can never be null in
+ # well-defined C/C++ and Clang can optimize such checks away in
+ # release builds, but they may be used in asserts in debug builds.
+ '-Wno-undefined-bool-conversion',
+ '-Wno-tautological-undefined-compare',
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-Wno-undefined-bool-conversion',
+ '-Wno-tautological-undefined-compare',
+ ],
+ },
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'AdditionalOptions': [
+ '-Wno-undefined-bool-conversion',
+ '-Wno-tautological-undefined-compare',
+ ],
+ },
+ },
+ }],
],
},
'Release_Base': {
@@ -3707,19 +3732,36 @@
['target_arch=="arm"', {
'target_conditions': [
['_toolset=="target"', {
- 'cflags_cc': [
- # The codesourcery arm-2009q3 toolchain warns at that the ABI
- # has changed whenever it encounters a varargs function. This
- # silences those warnings, as they are not helpful and
- # clutter legitimate warnings.
- '-Wno-abi',
- ],
'conditions': [
+ ['clang==0', {
+ 'cflags_cc': [
+              # The codesourcery arm-2009q3 toolchain warns that the ABI
+ # has changed whenever it encounters a varargs function. This
+ # silences those warnings, as they are not helpful and
+ # clutter legitimate warnings.
+ '-Wno-abi',
+ ],
+ }],
+ ['clang==1 and arm_arch!="" and OS!="android"', {
+ 'cflags': [
+ '-target arm-linux-gnueabihf',
+ ],
+ 'ldflags': [
+ '-target arm-linux-gnueabihf',
+ ],
+ }],
['arm_arch!=""', {
'cflags': [
'-march=<(arm_arch)',
],
}],
+ ['clang==1 and OS!="android"', {
+ 'cflags': [
+ # We need to disable clang's builtin assembler as it can't
+ # handle several asm files, crbug.com/124610
+ '-no-integrated-as',
+ ],
+ }],
['arm_tune!=""', {
'cflags': [
'-mtune=<(arm_tune)',
@@ -3990,7 +4032,7 @@
'conditions': [
['use_sanitizer_options==1 and OS=="linux" and (chromeos==0 or target_arch!="ia32")', {
'dependencies': [
- '<(DEPTH)/base/base.gyp:sanitizer_options',
+ '<(DEPTH)/build/sanitizers/sanitizers.gyp:sanitizer_options',
],
}],
],
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
index f509677441..29d739a438 100644
--- a/build/config/android/config.gni
+++ b/build/config/android/config.gni
@@ -87,14 +87,20 @@ if (is_android) {
# only need to define the current one, rather than one for every platform
# like the toolchain roots.
if (cpu_arch == "x86") {
+ android_prebuilt_arch = "android-x86"
+ _binary_prefix = "i686-linux-android"
android_toolchain_root = "$x86_android_toolchain_root"
android_libgcc_file =
"$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_version}/libgcc.a"
} else if (cpu_arch == "arm") {
+ android_prebuilt_arch = "android-arm"
+ _binary_prefix = "arm-linux-androideabi"
android_toolchain_root = "$arm_android_toolchain_root"
android_libgcc_file =
"$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_version}/libgcc.a"
} else if (cpu_arch == "mipsel") {
+ android_prebuilt_arch = "android-mips"
+ _binary_prefix = "mipsel-linux-android"
android_toolchain_root = "$mips_android_toolchain_root"
android_libgcc_file =
"$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_version}/libgcc.a"
@@ -102,6 +108,9 @@ if (is_android) {
assert(false, "Need android libgcc support for your target arch.")
}
+ android_readelf = "$android_toolchain_root/bin/$_binary_prefix-readelf"
+ android_gdbserver = "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
+
# stlport stuff --------------------------------------------------------------
use_system_stlport = is_android_webview_build
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
index a72a1bec19..376b4fa040 100644
--- a/build/config/android/internal_rules.gni
+++ b/build/config/android/internal_rules.gni
@@ -11,6 +11,47 @@ rebased_android_sdk = rebase_path(android_sdk, root_build_dir)
rebased_android_sdk_root = rebase_path(android_sdk_root, root_build_dir)
rebased_android_sdk_build_tools = rebase_path(android_sdk_build_tools, root_build_dir)
+android_sdk_jar = "$android_sdk/android.jar"
+rebased_android_sdk_jar = rebase_path(android_sdk_jar, root_build_dir)
+
+template("android_lint") {
+ jar_path = invoker.jar_path
+ android_manifest = invoker.android_manifest
+ java_files = invoker.java_files
+ base_path = "$target_gen_dir/$target_name"
+
+ action(target_name) {
+ script = "//build/android/gyp/lint.py"
+ result_path = base_path + "/result.xml"
+ config_path = base_path + "/config.xml"
+ suppressions_file = "//build/android/lint/suppressions.xml"
+ inputs = [
+ suppressions_file,
+ android_manifest,
+ jar_path,
+ ] + java_files
+
+ outputs = [
+ config_path,
+ result_path
+ ]
+
+ rebased_java_files = rebase_path(java_files, root_build_dir)
+
+ args = [
+ "--lint-path=$rebased_android_sdk_root/tools/lint",
+ "--config-path", rebase_path(suppressions_file, root_build_dir),
+ "--manifest-path", rebase_path(android_manifest, root_build_dir),
+ "--product-dir=.",
+ "--jar-path", rebase_path(jar_path, root_build_dir),
+ "--processed-config-path", rebase_path(config_path, root_build_dir),
+ "--result-path", rebase_path(result_path, root_build_dir),
+ "--java-files=$rebased_java_files",
+ "--enable",
+ ]
+ }
+}
+
# Write the target's .build_config file. This is a json file that contains a
# dictionary of information about how to build this target (things that
@@ -73,6 +114,17 @@ template("write_build_config") {
]
}
+ if (type == "android_apk") {
+ if (defined(invoker.native_libs)) {
+ rebased_native_libs = rebase_path(invoker.native_libs, root_build_dir)
+ rebased_android_readelf = rebase_path(android_readelf, root_build_dir)
+ args += [
+ "--native-libs=$rebased_native_libs",
+ "--readelf-path=$rebased_android_readelf",
+ ]
+ }
+ }
+
if (defined(invoker.srcjar)) {
args += [
"--srcjar", rebase_path(invoker.srcjar, root_build_dir)
@@ -283,6 +335,72 @@ template("create_apk") {
}
}
+template("java_prebuilt") {
+ _input_jar_path = invoker.input_jar_path
+ _output_jar_path = invoker.output_jar_path
+ _jar_toc_path = _output_jar_path + ".TOC"
+
+ assert(invoker.build_config != "")
+
+ if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+ _proguard_jar_path = "$android_sdk_root/tools/proguard/lib/proguard.jar"
+ _proguard_config_path = invoker.proguard_config
+ _build_config = invoker.build_config
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ action("${target_name}__proguard_process") {
+ script = "//build/android/gyp/proguard.py"
+ inputs = [
+ android_sdk_jar,
+ _proguard_jar_path,
+ _build_config,
+ _input_jar_path,
+ _proguard_config_path,
+ ]
+ depfile = "${target_gen_dir}/${target_name}.d"
+ outputs = [
+ depfile,
+ _output_jar_path,
+ ]
+ args = [
+ "--depfile", rebase_path(depfile, root_build_dir),
+ "--proguard-path", rebase_path(_proguard_jar_path, root_build_dir),
+ "--input-path", rebase_path(_input_jar_path, root_build_dir),
+ "--output-path", rebase_path(_output_jar_path, root_build_dir),
+ "--proguard-config", rebase_path(_proguard_config_path, root_build_dir),
+ "--classpath", rebased_android_sdk_jar,
+ "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+ ]
+ }
+ } else {
+ copy("${target_name}__copy_jar") {
+ sources = [_input_jar_path]
+ outputs = [_output_jar_path]
+ }
+ }
+
+ action("${target_name}__jar_toc") {
+ script = "//build/android/gyp/jar_toc.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ outputs = [
+ depfile,
+ _jar_toc_path,
+ _jar_toc_path + ".md5.stamp"
+ ]
+ inputs = [ _output_jar_path ]
+ args = [
+ "--depfile", rebase_path(depfile, root_build_dir),
+ "--jar-path", rebase_path(_output_jar_path, root_build_dir),
+ "--toc-path", rebase_path(_jar_toc_path, root_build_dir),
+ ]
+ }
+
+ group(target_name) {
+ deps = [
+ ":${target_name}__jar_toc"
+ ]
+ }
+}
+
# Compiles and jars a set of java files.
#
# Outputs:
@@ -293,7 +411,7 @@ template("create_apk") {
# java_files: List of .java files to compile.
# java_deps: List of java dependencies. These should all have a .jar output
# at "${target_gen_dir}/${target_name}.jar.
-# chromium_code: If 1, enable extra warnings.
+# chromium_code: If true, enable extra warnings.
# srcjar_deps: List of srcjar dependencies. The .java files contained in the
# dependencies srcjar outputs will be compiled and added to the output jar.
# jar_path: Use this to explicitly set the output jar path. Defaults to
@@ -303,93 +421,83 @@ template("java_library") {
assert(defined(invoker.build_config))
assert(defined(invoker.jar_path))
- java_files = invoker.java_files
- jar_path = invoker.jar_path
- jar_toc_path = jar_path + ".TOC"
+ _java_files = invoker.java_files
+ _final_jar_path = invoker.jar_path
+ _intermediate_jar_path = "$target_gen_dir/$target_name.initial.jar"
- build_config = invoker.build_config
+ _build_config = invoker.build_config
- jar_excluded_patterns = []
+ _jar_excluded_patterns = []
if (defined(invoker.jar_excluded_patterns)) {
- jar_excluded_patterns += invoker.jar_excluded_patterns
+ _jar_excluded_patterns += invoker.jar_excluded_patterns
}
- chromium_code = false
+ _chromium_code = false
if (defined(invoker.chromium_code)) {
- chromium_code = chromium_code || invoker.chromium_code
+ _chromium_code = invoker.chromium_code
}
- srcjar_deps = []
+ _srcjar_deps = []
if (defined(invoker.srcjar_deps)) {
- srcjar_deps += invoker.srcjar_deps
+ _srcjar_deps += invoker.srcjar_deps
}
- java_srcjars = []
- foreach(dep, srcjar_deps) {
- dep_gen_dir = get_label_info(dep, "target_gen_dir")
- dep_name = get_label_info(dep, "name")
- java_srcjars += [ "$dep_gen_dir/$dep_name.srcjar" ]
+ _java_srcjars = []
+ foreach(dep, _srcjar_deps) {
+ _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+ _dep_name = get_label_info(dep, "name")
+ _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
}
# Mark srcjar_deps as used.
- assert(srcjar_deps == [] || srcjar_deps != [])
-
- rebase_jar_path = rebase_path(jar_path, root_build_dir)
+ assert(_srcjar_deps == [] || true)
- system_jars = [ "${android_sdk}/android.jar" ]
+ _system_jars = [ android_sdk_jar ]
action("${target_name}__javac") {
script = "//build/android/gyp/javac.py"
depfile = "$target_gen_dir/$target_name.d"
outputs = [
depfile,
- jar_path,
- jar_path + ".md5.stamp"
+ _intermediate_jar_path,
+ _intermediate_jar_path + ".md5.stamp"
]
- sources = java_files + java_srcjars
- inputs = system_jars + [ build_config ]
-
- rebase_system_jars = rebase_path(system_jars, root_build_dir)
- rebase_java_srcjars = rebase_path(java_srcjars, root_build_dir)
- rebase_build_config = rebase_path(build_config, root_build_dir)
- rebase_depfile = rebase_path(depfile, root_build_dir)
+ sources = _java_files + _java_srcjars
+ inputs = _system_jars + [ _build_config ]
+
+ _rebased_system_jars = rebase_path(_system_jars, root_build_dir)
+ _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
+ _rebased_build_config = rebase_path(_build_config, root_build_dir)
+ _rebased_depfile = rebase_path(depfile, root_build_dir)
+ _rebased_jar_path = rebase_path(_intermediate_jar_path, root_build_dir)
args = [
- "--depfile=$rebase_depfile",
- "--classpath=$rebase_system_jars",
- "--classpath=@FileArg($rebase_build_config:javac:classpath)",
- "--jar-path=$rebase_jar_path",
- "--java-srcjars=$rebase_java_srcjars",
- "--java-srcjars=@FileArg($rebase_build_config:javac:srcjars)",
- "--jar-excluded-classes=$jar_excluded_patterns",
+ "--depfile=$_rebased_depfile",
+ "--classpath=$_rebased_system_jars",
+ "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+ "--jar-path=$_rebased_jar_path",
+ "--java-srcjars=$_rebased_java_srcjars",
+ "--java-srcjars=@FileArg($_rebased_build_config:javac:srcjars)",
+ "--jar-excluded-classes=$_jar_excluded_patterns",
]
- if (chromium_code) {
+ if (_chromium_code) {
args += [ "--chromium-code" ]
}
- args += rebase_path(java_files, root_build_dir)
+ args += rebase_path(_java_files, root_build_dir)
}
- # TODO(cjhopman): proguard
-
- rebase_jar_toc_path = rebase_path(jar_toc_path, root_build_dir)
- action("${target_name}__jar_toc") {
- script = "//build/android/gyp/jar_toc.py"
- depfile = "$target_gen_dir/$target_name.d"
- outputs = [
- depfile,
- jar_toc_path,
- jar_toc_path + ".md5.stamp"
- ]
- inputs = [ jar_path ]
- args = [
- "--depfile", rebase_path(depfile, root_build_dir),
- "--jar-path=${rebase_jar_path}",
- "--toc-path=${rebase_jar_toc_path}",
- ]
+ java_prebuilt("${target_name}__finish") {
+ build_config = _build_config
+ input_jar_path = _intermediate_jar_path
+ output_jar_path = _final_jar_path
+ if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+ proguard_preprocess = invoker.proguard_preprocess
+ proguard_config = invoker.proguard_config
+ }
}
group(target_name) {
deps = [
":${target_name}__javac",
- ":${target_name}__jar_toc",
+ ":${target_name}__finish",
]
}
}
@@ -408,6 +516,14 @@ template("android_java_library") {
_jar_path = invoker.jar_path
_dex_path = invoker.dex_path
+ _android_manifest = "//build/android/AndroidManifest.xml"
+ if (defined(invoker.android_manifest)) {
+ _android_manifest = invoker.android_manifest
+ }
+ assert(_android_manifest != "")
+
+ _final_deps = []
+
java_library("${target_name}__java_library") {
jar_path = _jar_path
if (defined(invoker.jar_excluded_patterns)) {
@@ -419,10 +535,20 @@ template("android_java_library") {
if (defined(invoker.srcjar_deps)) {
srcjar_deps = invoker.srcjar_deps
}
+ if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+ proguard_preprocess = invoker.proguard_preprocess
+ proguard_config = invoker.proguard_config
+ }
}
- # TODO(cjhopman): lint
-
+ if (defined(invoker.chromium_code) && invoker.chromium_code) {
+ _final_deps += [ ":${target_name}__lint" ]
+ android_lint("${target_name}__lint") {
+ android_manifest = _android_manifest
+ jar_path = _jar_path
+ java_files = invoker.java_files
+ }
+ }
dex("${target_name}__dex") {
sources = [_jar_path]
@@ -433,11 +559,10 @@ template("android_java_library") {
deps = [
":${target_name}__java_library",
":${target_name}__dex",
- ]
+ ] + _final_deps
}
}
-
# Runs process_resources.py
template("process_resources") {
zip_path = invoker.zip_path
@@ -515,3 +640,29 @@ template("process_resources") {
}
}
}
+
+template("copy_ex") {
+ action(target_name) {
+ script = "//build/android/gyp/copy_ex.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ outputs = [
+ depfile,
+ ]
+
+ inputs = []
+ if (defined(invoker.inputs)) {
+ inputs += invoker.inputs
+ }
+
+ args = [
+ "--depfile", rebase_path(depfile, root_build_dir),
+ "--dest", rebase_path(invoker.dest, root_build_dir),
+ ]
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+ if (defined(invoker.clear_dir) && invoker.clear_dir) {
+ args += ["--clear"]
+ }
+ }
+}
diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni
index 61a8322324..43d79d0153 100644
--- a/build/config/android/rules.gni
+++ b/build/config/android/rules.gni
@@ -237,6 +237,12 @@ template("java_cpp_template") {
"--output", rebase_path(gen_dir, root_build_dir) + "/{{source_name_part}}.java",
"--template={{source}}",
]
+
+ if (defined(invoker.defines)) {
+ foreach(def, invoker.defines) {
+ args += ["--defines", def]
+ }
+ }
}
apply_gcc_outputs = get_target_outputs(":${target_name}__apply_gcc")
@@ -399,9 +405,12 @@ template("java_strings_grd") {
# java_files: List of .java files included in this library.
# srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
# will be added to java_files and be included in this library.
-#
+# chromium_code: If true, extra static analysis warning/errors will be enabled.
# jar_excluded_patterns: List of patterns of .class files to exclude from the
# final jar.
+# proguard_preprocess: If true, proguard preprocessing will be run. This can
+# be used to remove unwanted parts of the library.
+# proguard_config: Path to the proguard config for preprocessing.
#
# Example
# android_library("foo_java") {
@@ -438,10 +447,21 @@ template("android_library") {
# base_path
}
+ _chromium_code = true
+ if (defined(invoker.chromium_code)) {
+ _chromium_code = invoker.chromium_code
+ }
+
android_java_library(target_name) {
+ chromium_code = _chromium_code
java_files = invoker.java_files
build_config = build_config
+ if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+ proguard_preprocess = true
+ proguard_config = invoker.proguard_config
+ }
+
if (defined(invoker.jar_excluded_patterns)) {
jar_excluded_patterns = invoker.jar_excluded_patterns
}
@@ -453,6 +473,72 @@ template("android_library") {
}
+# Declare an Android library target for a prebuilt jar
+#
+# This target creates an Android library containing java code and Android
+# resources.
+#
+# Variables
+# deps: Specifies the dependencies of this target. Java targets in this list
+# will be added to the javac classpath. Android resources in dependencies
+# will be used when building this library.
+# jar_path: Path to the prebuilt jar.
+# proguard_preprocess: If true, proguard preprocessing will be run. This can
+# be used to remove unwanted parts of the library.
+# proguard_config: Path to the proguard config for preprocessing.
+#
+# Example
+# android_java_prebuilt("foo_java") {
+# jar_path = "foo.jar"
+# deps = [
+# ":foo_resources",
+# ":bar_java"
+# ]
+# }
+template("android_java_prebuilt") {
+ assert(defined(invoker.jar_path))
+ _base_path = "${target_gen_dir}/$target_name"
+ _jar_path = _base_path + ".jar"
+ _dex_path = _base_path + ".dex.jar"
+ _build_config = _base_path + ".build_config"
+
+ write_build_config("${target_name}__build_config") {
+ type = "android_library"
+
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ build_config = _build_config
+ jar_path = _jar_path
+ dex_path = _dex_path
+ }
+
+ java_prebuilt("${target_name}__process_jar") {
+ if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+ proguard_preprocess = true
+ proguard_config = invoker.proguard_config
+ }
+
+ build_config = _build_config
+ input_jar_path = invoker.jar_path
+ output_jar_path = _jar_path
+ }
+
+ dex("${target_name}__dex") {
+ sources = [_jar_path]
+ output = _dex_path
+ }
+
+ group(target_name) {
+ deps = [
+ ":${target_name}__dex",
+ ]
+ }
+}
+
+
+
# Declare an Android apk target
#
# This target creates an Android APK containing java code, resources, assets,
@@ -502,10 +588,19 @@ template("android_apk") {
resource_srcjar_path = "$base_path.resources.srcjar"
jar_path = "$base_path.jar"
final_dex_path = "$gen_dir/classes.dex"
+ _template_name = target_name
- # Just mark these as used for now.
- assert(!defined(invoker.native_libs)
- || invoker.native_libs == [] || true)
+ _native_lib_version_name = ""
+
+ _use_chromium_linker = false
+ _enable_chromium_linker_tests = false
+ _load_library_from_apk = false
+
+ _native_libs_dir = base_path + "/libs"
+ _native_libs = []
+ if (defined(invoker.native_libs)) {
+ _native_libs = invoker.native_libs
+ }
_keystore_path = android_default_keystore_path
_keystore_name = android_default_keystore_name
@@ -517,13 +612,13 @@ template("android_apk") {
_keystore_password = invoker.keystore_password
}
- # TODO(cjhopman): Remove this once we correctly generate the real
- # NativeLibraries.java
- srcjar_deps = [ "//base:base_native_libraries_gen" ]
+ _srcjar_deps = []
if (defined(invoker.srcjar_deps)) {
- srcjar_deps += invoker.srcjar_deps
+ _srcjar_deps += invoker.srcjar_deps
}
+ _rebased_build_config = rebase_path(build_config, root_build_dir)
+
write_build_config("${target_name}__build_config") {
type = "android_apk"
srcjar = resource_srcjar_path
@@ -533,6 +628,8 @@ template("android_apk") {
if (defined(invoker.deps)) {
deps = invoker.deps
}
+
+ native_libs = _native_libs
}
final_deps = []
@@ -548,11 +645,39 @@ template("android_apk") {
generate_constant_ids = true
}
+ java_cpp_template("${target_name}__native_libraries_java") {
+ package_name = "org/chromium/base/library_loader"
+ sources = [
+ "//base/android/java/templates/NativeLibraries.template",
+ ]
+ inputs = [
+ build_config,
+ ]
+
+ defines = [
+ "NATIVE_LIBRARIES_LIST=" +
+ "@FileArg($_rebased_build_config:native:java_libraries_list)",
+ "NATIVE_LIBRARIES_VERSION_NUMBER=\"$_native_lib_version_name\"",
+ ]
+ if (_use_chromium_linker) {
+ defines += ["ENABLED_CHROMIUM_LINKER"]
+ }
+ if (_load_library_from_apk) {
+ defines += ["ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE"]
+ }
+ if (_enable_chromium_linker_tests) {
+ defines += ["ENABLE_CHROMIUM_LINKER_TESTS"]
+ }
+ }
+ _srcjar_deps += [ ":${target_name}__native_libraries_java" ]
+
rebased_build_config = rebase_path(build_config, root_build_dir)
final_deps += [":${target_name}__java"]
android_java_library("${target_name}__java") {
+ android_manifest = invoker.android_manifest
java_files = invoker.java_files
+ srcjar_deps = _srcjar_deps
dex_path = base_path + ".dex.jar"
}
@@ -565,6 +690,23 @@ template("android_apk") {
args = ["--inputs=@FileArg($dex_arg_key)"]
}
+ if (_native_libs != []) {
+ copy_ex("${target_name}__prepare_native") {
+ clear_dir = true
+ inputs = [build_config]
+ dest = "$_native_libs_dir/$android_app_abi"
+ args = [
+ "--files=@FileArg(${rebased_build_config}:native:libraries)",
+ ]
+ if (is_debug) {
+ rebased_gdbserver = rebase_path(android_gdbserver, root_build_dir)
+ args += [
+ "--files=[\"$rebased_gdbserver\"]"
+ ]
+ }
+ }
+ }
+
final_deps += [":${target_name}__create"]
create_apk("${target_name}__create") {
apk_path = invoker.final_apk_path
@@ -576,7 +718,10 @@ template("android_apk") {
keystore_path = _keystore_path
keystore_password = _keystore_password
- # TODO: native libs
+ if (_native_libs != []) {
+ native_libs_dir = _native_libs_dir
+ deps = [":${_template_name}__prepare_native"]
+ }
}
group(target_name) {
@@ -595,6 +740,12 @@ template("android_apk") {
# resource dependencies of the apk.
# unittests_dep: This should be the label of the gtest native target. This
# target must be defined previously in the same file.
+# unittests_binary: The name of the binary produced by the unittests_dep
+# target, relative to the root build directory. If unspecified, it assumes
+# the name of the unittests_dep target (which will be correct unless that
+# target specifies an "output_name".
+# TODO(brettw) make this automatic by allowing get_target_outputs to
+# support executables.
#
# Example
# unittest_apk("foo_unittests_apk") {
@@ -602,7 +753,16 @@ template("android_apk") {
# unittests_dep = ":foo_unittests"
# }
template("unittest_apk") {
+ assert(defined(invoker.unittests_dep), "Need unittests_dep for $target_name")
+
test_suite_name = get_label_info(invoker.unittests_dep, "name")
+
+ if (defined(invoker.unittests_binary)) {
+ unittests_binary = root_out_dir + "/" + invoker.unittests_binary
+ } else {
+ unittests_binary = root_out_dir + "/" + test_suite_name
+ }
+
android_apk(target_name) {
apk_name = test_suite_name
final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk"
@@ -610,7 +770,7 @@ template("unittest_apk") {
"//testing/android/java/src/org/chromium/native_test/ChromeNativeTestActivity.java"
]
android_manifest = "//testing/android/java/AndroidManifest.xml"
- unittests_outputs = get_target_outputs(invoker.unittests_dep)
+ unittests_outputs = [ unittests_binary ]
native_libs = [unittests_outputs[0]]
if (defined(invoker.deps)) {
deps = invoker.deps
diff --git a/build/grit_target.gypi b/build/grit_target.gypi
index fe9900b54e..179f986573 100644
--- a/build/grit_target.gypi
+++ b/build/grit_target.gypi
@@ -7,8 +7,9 @@
# defined:
# grit_out_dir: string: the output directory path
-# NOTE: This file is optional, not all targets that use grit include it, some
-# do their own custom directives instead.
+# DO NOT USE THIS FILE. Instead, use qualified includes.
+# TODO: Convert everything to qualified includes, and delete this file,
+# http://crbug.com/401588
{
'conditions': [
# If the target is a direct binary, it needs to be able to find the header,
diff --git a/build/gyp_chromium b/build/gyp_chromium
index f87761dbd6..b8fe82dc5c 100755
--- a/build/gyp_chromium
+++ b/build/gyp_chromium
@@ -188,6 +188,13 @@ def additional_include_files(supplemental_files, args=[]):
if __name__ == '__main__':
+ # Disabling garbage collection saves about 1 second out of 16 on a Linux
+ # z620 workstation. Since this is a short-lived process it's not a problem to
+ # leak a few cyclyc references in order to spare the CPU cycles for
+ # scanning the heap.
+ import gc
+ gc.disable()
+
args = sys.argv[1:]
use_analyzer = len(args) and args[0] == '--analyzer'
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
index ae46fa24a1..f5d288a726 100755
--- a/build/install-build-deps-android.sh
+++ b/build/install-build-deps-android.sh
@@ -47,6 +47,11 @@ sudo apt-get -y install lighttpd python-pexpect xvfb x11-utils
# Few binaries in the Android SDK require 32-bit libraries on the host.
sudo apt-get -y install lib32z1 g++-multilib
+# On Trusty-based systems you can't compile V8's mksnapshot without this one.
+# It is compiled for the host, using the -m32 flag, so it needs some 32 bit
+# development support. It seems harmless on older Linux releases.
+sudo apt-get -y install linux-libc-dev:i386
+
sudo apt-get -y install ant
# Install openjdk and openjre 7 stuff
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
index 15e0dfeb93..a15edef94c 100755
--- a/build/install-build-deps.sh
+++ b/build/install-build-deps.sh
@@ -12,7 +12,6 @@ usage() {
echo "Usage: $0 [--options]"
echo "Options:"
echo "--[no-]syms: enable or disable installation of debugging symbols"
- echo "--[no-]lib32: enable or disable installation of 32 bit libraries"
echo "--[no-]arm: enable or disable installation of arm cross toolchain"
echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
"fonts"
@@ -45,6 +44,7 @@ do
case "$1" in
--syms) do_inst_syms=1;;
--no-syms) do_inst_syms=0;;
+ # TODO(phajdan.jr): Remove the lib32 flags when nothing else refers to them.
--lib32) do_inst_lib32=1;;
--no-lib32) do_inst_lib32=0;;
--arm) do_inst_arm=1;;
@@ -91,27 +91,26 @@ if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
fi
# Packages needed for chromeos only
-chromeos_dev_list="libbluetooth-dev"
+chromeos_dev_list="libbluetooth-dev libxkbcommon-dev"
# Packages needed for development
dev_list="apache2.2-bin bison curl dpkg-dev elfutils devscripts fakeroot flex
- fonts-thai-tlwg g++ git-core gperf language-pack-da language-pack-fr
- language-pack-he language-pack-zh-hant libapache2-mod-php5
- libasound2-dev libbrlapi-dev libav-tools libbz2-dev libcairo2-dev
- libcap-dev libcups2-dev libcurl4-gnutls-dev libdrm-dev libelf-dev
- libexif-dev libgconf2-dev libgl1-mesa-dev libglib2.0-dev
- libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev libkrb5-dev
- libnspr4-dev libnss3-dev libpam0g-dev libpci-dev libpulse-dev
- libsctp-dev libspeechd-dev libsqlite3-dev libssl-dev libudev-dev
- libwww-perl libxslt1-dev libxss-dev libxt-dev libxtst-dev
+ fonts-thai-tlwg g++ git-core git-svn gperf language-pack-da
+ language-pack-fr language-pack-he language-pack-zh-hant
+ libapache2-mod-php5 libasound2-dev libbrlapi-dev libav-tools
+ libbz2-dev libcairo2-dev libcap-dev libcups2-dev libcurl4-gnutls-dev
+ libdrm-dev libelf-dev libexif-dev libgconf2-dev libgl1-mesa-dev
+ libglib2.0-dev libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev
+ libkrb5-dev libnspr4-dev libnss3-dev libpam0g-dev libpci-dev
+ libpulse-dev libsctp-dev libspeechd-dev libsqlite3-dev libssl-dev
+ libudev-dev libwww-perl libxslt1-dev libxss-dev libxt-dev libxtst-dev
mesa-common-dev openbox patch perl php5-cgi pkg-config python
python-cherrypy3 python-dev python-psutil rpm ruby subversion
ttf-dejavu-core ttf-indic-fonts ttf-kochi-gothic ttf-kochi-mincho
wdiff xfonts-mathml zip $chromeos_dev_list"
# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
-# NaCl binaries. These are always needed, regardless of whether or not we want
-# the full 32-bit "cross-compile" support (--lib32).
+# NaCl binaries.
if file /sbin/init | grep -q 'ELF 64-bit'; then
dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
fi
@@ -405,216 +404,3 @@ if test "$do_inst_nacl" = "1"; then
else
echo "Skipping symbolic links for NaCl."
fi
-
-# Install 32bit backwards compatibility support for 64bit systems
-if file /sbin/init | grep -q 'ELF 64-bit'; then
- if test "$do_inst_lib32" != "1"
- then
- echo "NOTE: If you were expecting the option to install 32bit libs,"
- echo "please run with the --lib32 flag."
- echo
- echo "Installation complete."
- exit 0
- else
- # This conditional statement has been added to deprecate and eventually
- # remove support for 32bit libraries on 64bit systems. But for the time
- # being, we still have to support a few legacy systems (e.g. bots), where
- # this feature is needed.
- # We only even give the user the option to install these libraries, if
- # they explicitly requested doing so by setting the --lib32 command line
- # flag.
- # And even then, we interactively ask them one more time whether they are
- # absolutely sure.
- # In order for that to work, we must reset the ${do_inst_lib32} variable.
- # There are other ways to achieve the same goal. But resetting the
- # variable is the best way to document the intended behavior -- and to
- # allow us to gradually deprecate and then remove the obsolete code.
- if test "${do_default-0}" -ne 1; then
- do_inst_lib32=
- fi
- fi
-
- echo "WARNING"
- echo
- echo "We no longer recommend that you use this script to install"
- echo "32bit libraries on a 64bit system. Instead, consider using the"
- echo "install-chroot.sh script to help you set up a 32bit environment"
- echo "for building and testing 32bit versions of Chrome."
- echo
- echo "The code for installing 32bit libraries on a 64bit system is"
- echo "unmaintained and might not work with modern versions of Ubuntu"
- echo "or Debian."
- if test "$do_inst_lib32" != "" ; then
- echo
- echo -n "Are you sure you want to proceed (y/N) "
- if yes_no 1; then
- do_inst_lib32=1
- fi
- fi
- if test "$do_inst_lib32" != "1"
- then
- exit 0
- fi
-
- # Standard 32bit compatibility libraries
- echo "First, installing the limited existing 32-bit support..."
- cmp_list="ia32-libs lib32asound2-dev lib32stdc++6 lib32z1
- lib32z1-dev libc6-dev-i386 libc6-i386 g++-multilib"
- if [ -n "`apt-cache search lib32readline-gplv2-dev 2>/dev/null`" ]; then
- cmp_list="${cmp_list} lib32readline-gplv2-dev"
- else
- cmp_list="${cmp_list} lib32readline5-dev"
- fi
- sudo apt-get install ${do_quietly-} $cmp_list
-
- tmp=/tmp/install-32bit.$$
- trap 'rm -rf "${tmp}"' EXIT INT TERM QUIT
- mkdir -p "${tmp}/apt/lists/partial" "${tmp}/cache" "${tmp}/partial"
- touch "${tmp}/status"
-
- [ -r /etc/apt/apt.conf ] && cp /etc/apt/apt.conf "${tmp}/apt/"
- cat >>"${tmp}/apt/apt.conf" <<EOF
- Apt::Architecture "i386";
- Dir::Cache "${tmp}/cache";
- Dir::Cache::Archives "${tmp}/";
- Dir::State::Lists "${tmp}/apt/lists/";
- Dir::State::status "${tmp}/status";
-EOF
-
- # Download 32bit packages
- echo "Computing list of available 32bit packages..."
- sudo apt-get -c="${tmp}/apt/apt.conf" update
-
- echo "Downloading available 32bit packages..."
- sudo apt-get -c="${tmp}/apt/apt.conf" \
- --yes --download-only --force-yes --reinstall install \
- ${lib_list} ${dbg_list}
-
- # Open packages, remove everything that is not a library, move the
- # library to a lib32 directory and package everything as a *.deb file.
- echo "Repackaging and installing 32bit packages for use on 64bit systems..."
- for i in ${lib_list} ${dbg_list}; do
- orig="$(echo "${tmp}/${i}"_*_i386.deb)"
- compat="$(echo "${orig}" |
- sed -e 's,\(_[^_/]*_\)i386\(.deb\),-ia32\1amd64\2,')"
- rm -rf "${tmp}/staging"
- msg="$(fakeroot -u sh -exc '
- # Unpack 32bit Debian archive
- umask 022
- mkdir -p "'"${tmp}"'/staging/dpkg/DEBIAN"
- cd "'"${tmp}"'/staging"
- ar x "'${orig}'"
- tar Cfx dpkg data.tar*
- tar zCfx dpkg/DEBIAN control.tar.gz
-
- # Create a posix extended regular expression fragment that will
- # recognize the includes which have changed. Should be rare,
- # will almost always be empty.
- includes=`sed -n -e "s/^[0-9a-z]* //g" \
- -e "\,usr/include/,p" dpkg/DEBIAN/md5sums |
- xargs -n 1 -I FILE /bin/sh -c \
- "cmp -s dpkg/FILE /FILE || echo FILE" |
- tr "\n" "|" |
- sed -e "s,|$,,"`
-
- # If empty, set it to not match anything.
- test -z "$includes" && includes="^//"
-
- # Turn the conflicts into an extended RE for removal from the
- # Provides line.
- conflicts=`sed -n -e "/Conflicts/s/Conflicts: *//;T;s/, */|/g;p" \
- dpkg/DEBIAN/control`
-
- # Rename package, change architecture, remove conflicts and dependencies
- sed -r -i \
- -e "/Package/s/$/-ia32/" \
- -e "/Architecture/s/:.*$/: amd64/" \
- -e "/Depends/s/:.*/: ia32-libs/" \
- -e "/Provides/s/($conflicts)(, *)?//g;T1;s/, *$//;:1" \
- -e "/Recommends/d" \
- -e "/Conflicts/d" \
- dpkg/DEBIAN/control
-
- # Only keep files that live in "lib" directories or the includes
- # that have changed.
- sed -r -i \
- -e "/\/lib64\//d" -e "/\/.?bin\//d" \
- -e "\,$includes,s,[ /]include/,&32/,g;s,include/32/,include32/,g" \
- -e "s, lib/, lib32/,g" \
- -e "s,/lib/,/lib32/,g" \
- -e "t;d" \
- -e "\,^/usr/lib32/debug\(.*/lib32\),s,^/usr/lib32/debug,/usr/lib/debug," \
- dpkg/DEBIAN/md5sums
-
- # Re-run ldconfig after installation/removal
- { echo "#!/bin/sh"; echo "[ \"x\$1\" = xconfigure ]&&ldconfig||:"; } \
- >dpkg/DEBIAN/postinst
- { echo "#!/bin/sh"; echo "[ \"x\$1\" = xremove ]&&ldconfig||:"; } \
- >dpkg/DEBIAN/postrm
- chmod 755 dpkg/DEBIAN/postinst dpkg/DEBIAN/postrm
-
- # Remove any other control files
- find dpkg/DEBIAN -mindepth 1 "(" -name control -o -name md5sums -o \
- -name postinst -o -name postrm ")" -o -print |
- xargs -r rm -rf
-
- # Remove any files/dirs that live outside of "lib" directories,
- # or are not in our list of changed includes.
- find dpkg -mindepth 1 -regextype posix-extended \
- "(" -name DEBIAN -o -name lib -o -regex "dpkg/($includes)" ")" \
- -prune -o -print | tac |
- xargs -r -n 1 sh -c "rm \$0 2>/dev/null || rmdir \$0 2>/dev/null || : "
- find dpkg -name lib64 -o -name bin -o -name "?bin" |
- tac | xargs -r rm -rf
-
- # Remove any symbolic links that were broken by the above steps.
- find -L dpkg -type l -print | tac | xargs -r rm -rf
-
- # Rename lib to lib32, but keep debug symbols in /usr/lib/debug/usr/lib32
- # That is where gdb looks for them.
- find dpkg -type d -o -path "*/lib/*" -print |
- xargs -r -n 1 sh -c "
- i=\$(echo \"\${0}\" |
- sed -e s,/lib/,/lib32/,g \
- -e s,/usr/lib32/debug\\\\\(.*/lib32\\\\\),/usr/lib/debug\\\\1,);
- mkdir -p \"\${i%/*}\";
- mv \"\${0}\" \"\${i}\""
-
- # Rename include to include32.
- [ -d "dpkg/usr/include" ] && mv "dpkg/usr/include" "dpkg/usr/include32"
-
- # Prune any empty directories
- find dpkg -type d | tac | xargs -r -n 1 rmdir 2>/dev/null || :
-
- # Create our own Debian package
- cd ..
- dpkg --build staging/dpkg .' 2>&1)"
- compat="$(eval echo $(echo "${compat}" |
- sed -e 's,_[^_/]*_amd64.deb,_*_amd64.deb,'))"
- [ -r "${compat}" ] || {
- echo "${msg}" >&2
- echo "Failed to build new Debian archive!" >&2
- exit 1
- }
-
- msg="$(sudo dpkg -i "${compat}" 2>&1)" && {
- echo "Installed ${compat##*/}"
- } || {
- # echo "${msg}" >&2
- echo "Skipped ${compat##*/}"
- }
- done
-
- # Add symbolic links for developing 32bit code
- echo "Adding missing symbolic links, enabling 32bit code development..."
- for i in $(find /lib32 /usr/lib32 -maxdepth 1 -name \*.so.\* |
- sed -e 's/[.]so[.][0-9].*/.so/' |
- sort -u); do
- [ "x${i##*/}" = "xld-linux.so" ] && continue
- [ -r "$i" ] && continue
- j="$(ls "$i."* | sed -e 's/.*[.]so[.]\([^.]*\)$/\1/;t;d' |
- sort -n | tail -n 1)"
- [ -r "$i.$j" ] || continue
- sudo ln -s "${i##*/}.$j" "$i"
- done
-fi
diff --git a/build/java_apk.gypi b/build/java_apk.gypi
index be5033bb4a..08529fa367 100644
--- a/build/java_apk.gypi
+++ b/build/java_apk.gypi
@@ -257,26 +257,6 @@
'includes': ['../build/android/write_ordered_libraries.gypi'],
},
{
- 'action_name': 'native_libraries_template_data_<(_target_name)',
- 'message': 'Creating native_libraries_list.h for <(_target_name)',
- 'inputs': [
- '<(DEPTH)/build/android/gyp/util/build_utils.py',
- '<(DEPTH)/build/android/gyp/create_native_libraries_header.py',
- '<(ordered_libraries_file)',
- ],
- 'outputs': [
- '<(native_libraries_template_data_file)',
- '<(native_libraries_template_version_file)',
- ],
- 'action': [
- 'python', '<(DEPTH)/build/android/gyp/create_native_libraries_header.py',
- '--ordered-libraries=<(ordered_libraries_file)',
- '--version-name=<(native_lib_version_name)',
- '--native-library-list=<(native_libraries_template_data_file)',
- '--version-output=<(native_libraries_template_version_file)',
- ],
- },
- {
'action_name': 'native_libraries_<(_target_name)',
'variables': {
'conditions': [
@@ -324,8 +304,7 @@
'inputs': [
'<(DEPTH)/build/android/gyp/util/build_utils.py',
'<(DEPTH)/build/android/gyp/gcc_preprocess.py',
- '<(native_libraries_template_data_file)',
- '<(native_libraries_template_version_file)',
+ '<(ordered_libraries_file)',
'<(native_libraries_template)',
],
'outputs': [
@@ -333,10 +312,12 @@
],
'action': [
'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
- '--include-path=<(native_libraries_template_data_dir)',
+ '--include-path=',
'--output=<(native_libraries_java_file)',
'--template=<(native_libraries_template)',
'--stamp=<(native_libraries_java_stamp)',
+ '--defines', 'NATIVE_LIBRARIES_LIST=@FileArg(<(ordered_libraries_file):java_libraries_list)',
+ '--defines', 'NATIVE_LIBRARIES_VERSION_NUMBER="<(native_lib_version_name)"',
'<@(gcc_preprocess_defines)',
],
},
@@ -449,7 +430,7 @@
'action': [
'python', '<(DEPTH)/build/android/gyp/create_device_library_links.py',
'--build-device-configuration=<(build_device_config_path)',
- '--libraries-json=<(ordered_libraries_file)',
+ '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
'--script-host-path=<(symlink_script_host_path)',
'--script-device-path=<(symlink_script_device_path)',
'--target-dir=<(device_library_dir)',
diff --git a/build/landmines.py b/build/landmines.py
index c31bac7901..b62e75851c 100755
--- a/build/landmines.py
+++ b/build/landmines.py
@@ -43,7 +43,7 @@ def get_build_dir(build_tool, is_iphone=False):
if build_tool == 'xcode':
ret = os.path.join(SRC_DIR, 'xcodebuild')
elif build_tool in ['make', 'ninja', 'ninja-ios']: # TODO: Remove ninja-ios.
- ret = os.path.join(SRC_DIR, 'out')
+ ret = os.path.join(SRC_DIR, os.environ.get('CHROMIUM_OUT_DIR', 'out'))
elif build_tool in ['msvs', 'vs', 'ib']:
ret = os.path.join(SRC_DIR, 'build')
else:
diff --git a/build/linux/install-arm-sysroot.py b/build/linux/install-arm-sysroot.py
index 5c278ea652..4d593cc718 100755
--- a/build/linux/install-arm-sysroot.py
+++ b/build/linux/install-arm-sysroot.py
@@ -25,6 +25,10 @@ Steps to rebuild the arm sysroot image:
nativeclient-archive2/toolchain/$NACL_REV/sysroot-arm-trusted.tgz
"""
+# TODO(sbc): merge this script into:
+# chrome/installer/linux/sysroot_scripts/install-debian.wheezy.sysroot.py
+
+import hashlib
import os
import shutil
import subprocess
@@ -33,9 +37,23 @@ import sys
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
URL_PREFIX = 'https://storage.googleapis.com'
-URL_PATH = 'nativeclient-archive2/toolchain'
-REVISION = 13035
-TARBALL = 'sysroot-arm-trusted.tgz'
+URL_PATH = 'chrome-linux-sysroot/toolchain'
+REVISION = 285950
+TARBALL = 'debian_wheezy_arm_sysroot.tgz'
+TARBALL_SHA1SUM = 'fc2f54db168887c5190c4c6686c869bedf668b4e'
+
+
+def get_sha1(filename):
+ sha1 = hashlib.sha1()
+ with open(filename, 'rb') as f:
+ while True:
+ # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
+ chunk = f.read(1024*1024)
+ if not chunk:
+ break
+ sha1.update(chunk)
+ return sha1.hexdigest()
+
def main(args):
if '--linux-only' in args:
@@ -70,6 +88,11 @@ def main(args):
else:
curl.append('--silent')
subprocess.check_call(curl)
+ sha1sum = get_sha1(tarball)
+ if sha1sum != TARBALL_SHA1SUM:
+ print 'Tarball sha1sum is wrong.'
+ print 'Expected %s, actual: %s' % (TARBALL_SHA1SUM, sha1sum)
+ return 1
subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
os.remove(tarball)
diff --git a/build/linux/system.gyp b/build/linux/system.gyp
index e33f22d4ee..d33bdf420f 100644
--- a/build/linux/system.gyp
+++ b/build/linux/system.gyp
@@ -438,6 +438,31 @@
},
],
}],
+ ['use_udev==1', {
+ 'targets': [
+ {
+ 'target_name': 'udev',
+ 'type': 'none',
+ 'conditions': [
+ ['_toolset=="target"', {
+ 'direct_dependent_settings': {
+ 'cflags': [
+ '<!@(<(pkg-config) --cflags libudev)'
+ ],
+ },
+ 'link_settings': {
+ 'ldflags': [
+ '<!@(<(pkg-config) --libs-only-L --libs-only-other libudev)',
+ ],
+ 'libraries': [
+ '<!@(<(pkg-config) --libs-only-l libudev)',
+ ],
+ },
+ }],
+ ],
+ },
+ ],
+ }],
], # conditions
'targets': [
{
@@ -1032,27 +1057,5 @@
}],
],
},
- {
- 'target_name': 'udev',
- 'type': 'none',
- 'conditions': [
- # libudev is not available on *BSD
- ['_toolset=="target" and os_bsd!=1', {
- 'direct_dependent_settings': {
- 'cflags': [
- '<!@(<(pkg-config) --cflags libudev)'
- ],
- },
- 'link_settings': {
- 'ldflags': [
- '<!@(<(pkg-config) --libs-only-L --libs-only-other libudev)',
- ],
- 'libraries': [
- '<!@(<(pkg-config) --libs-only-l libudev)',
- ],
- },
- }],
- ],
- },
],
}
diff --git a/build/sanitizers/OWNERS b/build/sanitizers/OWNERS
new file mode 100644
index 0000000000..10a3e3b992
--- /dev/null
+++ b/build/sanitizers/OWNERS
@@ -0,0 +1,2 @@
+glider@chromium.org
+per-file tsan_suppressions.cc=*
diff --git a/build/sanitizers/sanitizer_options.cc b/build/sanitizers/sanitizer_options.cc
new file mode 100644
index 0000000000..ece14b9793
--- /dev/null
+++ b/build/sanitizers/sanitizer_options.cc
@@ -0,0 +1,119 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+// strict_memcmp=1 - disable the strict memcmp() checking
+// (http://crbug.com/178677 and http://crbug.com/178404).
+// malloc_context_size=5 - limit the size of stack traces collected by ASan
+// for each malloc/free by 5 frames. These stack traces tend to accumulate
+// very fast in applications using JIT (v8 in Chrome's case), see
+// https://code.google.com/p/address-sanitizer/issues/detail?id=177
+// symbolize=false - disable the in-process symbolization, which isn't 100%
+// compatible with the existing sandboxes and doesn't make much sense for
+// stripped official binaries.
+// legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
+// work around libGL.so using the obsolete API, see
+// http://crbug.com/341805. This may break if pthread_cond_t objects are
+// accessed by both instrumented and non-instrumented binaries (e.g. if
+// they reside in shared memory). This option is going to be deprecated in
+// upstream AddressSanitizer and must not be used anywhere except the
+// official builds.
+// replace_intrin=0 - do not intercept memcpy(), memmove() and memset() to
+// work around http://crbug.com/162461 (ASan report in OpenCL on Mac).
+// check_printf=1 - check the memory accesses to printf (and other formatted
+// output routines) arguments.
+// use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+// for stack overflow detection.
+// strip_path_prefix=Release/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports
+// (if symbolize=true, which is set when running with LeakSanitizer).
+#if defined(OS_LINUX)
+#if defined(GOOGLE_CHROME_BUILD)
+// Default AddressSanitizer options for the official build. These do not affect
+// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
+// Chromium builds.
+const char kAsanDefaultOptions[] =
+ "legacy_pthread_cond=1 malloc_context_size=5 strict_memcmp=0 "
+ "symbolize=false check_printf=1 use_sigaltstack=1 detect_leaks=0 "
+ "strip_path_prefix=Release/../../ ";
+#else
+// Default AddressSanitizer options for buildbots and non-official builds.
+const char *kAsanDefaultOptions =
+ "strict_memcmp=0 symbolize=false check_printf=1 use_sigaltstack=1 "
+ "detect_leaks=0 strip_path_prefix=Release/../../ ";
+#endif // GOOGLE_CHROME_BUILD
+
+#elif defined(OS_MACOSX)
+const char *kAsanDefaultOptions =
+ "strict_memcmp=0 replace_intrin=0 check_printf=1 use_sigaltstack=1 "
+ "strip_path_prefix=Release/../../ ";
+#endif // OS_LINUX
+
+#if defined(OS_LINUX) || defined(OS_MACOSX)
+extern "C"
+__attribute__((no_sanitize_address))
+__attribute__((visibility("default")))
+// The function isn't referenced from the executable itself. Make sure it isn't
+// stripped by the linker.
+__attribute__((used))
+const char *__asan_default_options() {
+ return kAsanDefaultOptions;
+}
+#endif // OS_LINUX || OS_MACOSX
+#endif // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && defined(OS_LINUX)
+// Default options for ThreadSanitizer in various configurations:
+// detect_deadlocks=1 - enable deadlock (lock inversion) detection.
+// second_deadlock_stack=1 - more verbose deadlock reports.
+// report_signal_unsafe=0 - do not report async-signal-unsafe functions
+// called from signal handlers.
+// report_thread_leaks=0 - do not report unjoined threads at the end of
+// the program execution.
+// print_suppressions=1 - print the list of matched suppressions.
+// history_size=7 - make the history buffer proportional to 2^7 (the maximum
+// value) to keep more stack traces.
+// strip_path_prefix=Release/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports.
+const char kTsanDefaultOptions[] =
+ "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
+ "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+ "strip_path_prefix=Release/../../ ";
+
+extern "C"
+__attribute__((no_sanitize_thread))
+__attribute__((visibility("default")))
+// The function isn't referenced from the executable itself. Make sure it isn't
+// stripped by the linker.
+__attribute__((used))
+const char *__tsan_default_options() {
+ return kTsanDefaultOptions;
+}
+
+extern "C" char kTSanDefaultSuppressions[];
+
+extern "C"
+__attribute__((no_sanitize_thread))
+__attribute__((visibility("default")))
+// The function isn't referenced from the executable itself. Make sure it isn't
+// stripped by the linker.
+__attribute__((used))
+const char *__tsan_default_suppressions() {
+ return kTSanDefaultSuppressions;
+}
+
+#endif // THREAD_SANITIZER && OS_LINUX
diff --git a/build/sanitizers/sanitizers.gyp b/build/sanitizers/sanitizers.gyp
new file mode 100644
index 0000000000..cbe7ec015d
--- /dev/null
+++ b/build/sanitizers/sanitizers.gyp
@@ -0,0 +1,52 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'sanitizer_options',
+ 'type': 'static_library',
+ 'toolsets': ['host', 'target'],
+ 'variables': {
+ # Every target is going to depend on sanitizer_options, so allow
+ # this one to depend on itself.
+ 'prune_self_dependency': 1,
+ # Do not let 'none' targets depend on this one, they don't need to.
+ 'link_dependency': 1,
+ },
+ 'sources': [
+ 'sanitizer_options.cc',
+ ],
+ 'include_dirs': [
+ '../..',
+ ],
+ # Some targets may want to opt-out from ASan, TSan and MSan and link
+ # without the corresponding runtime libraries. We drop the libc++
+ # dependency and omit the compiler flags to avoid bringing instrumented
+ # code to those targets.
+ 'conditions': [
+ ['use_custom_libcxx==1', {
+ 'dependencies!': [
+ '../../third_party/libc++/libc++.gyp:libcxx_proxy',
+ ],
+ }],
+ ['tsan==1', {
+ 'sources': [
+ 'tsan_suppressions.cc',
+ ],
+ }],
+ ],
+ 'cflags/': [
+ ['exclude', '-fsanitize='],
+ ['exclude', '-fsanitize-'],
+ ],
+ 'direct_dependent_settings': {
+ 'ldflags': [
+ '-Wl,-u_sanitizer_options_link_helper',
+ ],
+ },
+ },
+ ],
+}
+
diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc
new file mode 100644
index 0000000000..76d4cc18d3
--- /dev/null
+++ b/build/sanitizers/tsan_suppressions.cc
@@ -0,0 +1,308 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for ThreadSanitizer.
+// You can also pass additional suppressions via TSAN_OPTIONS:
+// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for more info.
+
+#if defined(THREAD_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kTSanDefaultSuppressions contains TSan suppressions delimited by newlines.
+// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for the instructions on writing suppressions.
+char kTSanDefaultSuppressions[] =
+// False positives in libflashplayer.so and libglib.so. Since we don't
+// instrument them, we cannot reason about the synchronization in them.
+"race:libflashplayer.so\n"
+"race:libglib*.so\n"
+
+// Intentional race in ToolsSanityTest.DataRace in base_unittests.
+"race:base/tools_sanity_unittest.cc\n"
+
+// Data race on WatchdogCounter [test-only].
+"race:base/threading/watchdog_unittest.cc\n"
+
+// Races in libevent, http://crbug.com/23244.
+"race:libevent/event.c\n"
+
+// http://crbug.com/46840.
+"race:base::HistogramSamples::IncreaseSum\n"
+"race:base::Histogram::Add\n"
+"race:base::HistogramSamples::Add\n"
+
+// http://crbug.com/84094.
+"race:sqlite3StatusSet\n"
+"race:pcache1EnforceMaxPage\n"
+"race:pcache1AllocPage\n"
+
+// http://crbug.com/102327.
+// Test-only race, won't fix.
+"race:tracked_objects::ThreadData::ShutdownSingleThreadedCleanup\n"
+
+// http://crbug.com/115540
+"race:*GetCurrentThreadIdentifier\n"
+
+// http://crbug.com/120808
+"race:base/threading/watchdog.cc\n"
+
+// http://crbug.com/157586
+"race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
+
+// http://crbug.com/158718
+"race:third_party/ffmpeg/libavcodec/pthread.c\n"
+"race:third_party/ffmpeg/libavcodec/pthread_frame.c\n"
+"race:third_party/ffmpeg/libavcodec/vp8.c\n"
+"race:third_party/ffmpeg/libavutil/mem.c\n"
+"race:*HashFrameForTesting\n"
+"race:third_party/ffmpeg/libavcodec/h264pred.c\n"
+"race:media::ReleaseData\n"
+
+// http://crbug.com/158922
+"race:third_party/libvpx/source/libvpx/vp8/encoder/*\n"
+
+// http://crbug.com/189177
+"race:thread_manager\n"
+"race:v8::Locker::Initialize\n"
+
+// http://crbug.com/223352
+"race:uprv_malloc_52\n"
+"race:uprv_realloc_52\n"
+
+// http://crbug.com/239359
+"race:media::TestInputCallback::OnData\n"
+
+// http://crbug.com/244368
+"race:skia::BeginPlatformPaint\n"
+
+// http://crbug.com/244385
+"race:unixTempFileDir\n"
+
+// http://crbug.com/244755
+"race:v8::internal::Zone::NewExpand\n"
+"race:TooLateToEnableNow\n"
+"race:adjust_segment_bytes_allocated\n"
+
+// http://crbug.com/244774
+"race:webrtc::RTPReceiver::ProcessBitrate\n"
+"race:webrtc::RTPSender::ProcessBitrate\n"
+"race:webrtc::VideoCodingModuleImpl::Decode\n"
+"race:webrtc::RTPSender::SendOutgoingData\n"
+"race:webrtc::VP8EncoderImpl::GetEncodedPartitions\n"
+"race:webrtc::VP8EncoderImpl::Encode\n"
+"race:webrtc::ViEEncoder::DeliverFrame\n"
+"race:webrtc::vcm::VideoReceiver::Decode\n"
+"race:webrtc::VCMReceiver::FrameForDecoding\n"
+"race:*trace_event_unique_catstatic*\n"
+
+// http://crbug.com/244856
+"race:AutoPulseLock\n"
+
+// http://crbug.com/246968
+"race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n"
+
+// http://crbug.com/246970
+"race:webrtc::EventPosix::StartTimer\n"
+
+// http://crbug.com/246974
+"race:content::GpuWatchdogThread::CheckArmed\n"
+
+// http://crbug.com/257396
+"race:base::debug::TraceEventTestFixture_TraceSamplingScope_Test::TestBody\n"
+
+// http://crbug.com/258479
+"race:SamplingStateScope\n"
+"race:g_trace_state\n"
+
+// http://crbug.com/258499
+"race:third_party/skia/include/core/SkRefCnt.h\n"
+
+// http://crbug.com/268924
+"race:base::g_power_monitor\n"
+"race:base::PowerMonitor::PowerMonitor\n"
+"race:base::PowerMonitor::AddObserver\n"
+
+// http://crbug.com/268941
+"race:tracked_objects::ThreadData::tls_index_\n"
+
+// http://crbug.com/270037
+"race:gLibCleanupFunctions\n"
+
+// http://crbug.com/272095
+"race:base::g_top_manager\n"
+
+// http://crbug.com/272987
+"race:webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>::set_enabled\n"
+
+// http://crbug.com/273047
+"race:base::*::g_lazy_tls_ptr\n"
+"race:IPC::SyncChannel::ReceivedSyncMsgQueue::lazy_tls_ptr_\n"
+
+// http://crbug.com/280466
+"race:content::WebRtcAudioCapturer::SetCapturerSource\n"
+
+// http://crbug.com/285242
+"race:media::PulseAudioOutputStream::SetVolume\n"
+
+// http://crbug.com/290964
+"race:PostponeInterruptsScope\n"
+"race:v8::internal::StackGuard::RequestInstallCode\n"
+
+// http://crbug.com/296883
+"race:net::URLFetcherCore::Stop\n"
+
+// http://crbug.com/308590
+"race:CustomThreadWatcher::~CustomThreadWatcher\n"
+
+// http://crbug.com/310851
+"race:net::ProxyResolverV8Tracing::Job::~Job\n"
+
+// http://crbug.com/313726
+"race:CallbackWasCalled\n"
+
+// http://crbug.com/327330
+"race:PrepareTextureMailbox\n"
+"race:cc::LayerTreeHost::PaintLayerContents\n"
+
+// http://crbug.com/328804
+"race:v8::internal::Heap::SetStackLimits\n"
+"race:ScavengePointer\n"
+
+// http://crbug.com/328826
+"race:gLCDOrder\n"
+"race:gLCDOrientation\n"
+
+// http://crbug.com/328868
+"race:PR_Lock\n"
+
+// http://crbug.com/329225
+"race:blink::currentTimeFunction\n"
+
+// http://crbug.com/329460
+"race:extensions::InfoMap::AddExtension\n"
+
+// http://crbug.com/330528
+"race:v8::internal::MarkCompactCollector::SweepInParallel\n"
+
+// http://crbug.com/333244
+"race:content::"
+ "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n"
+
+// http://crbug.com/333871
+"race:v8::internal::Interface::NewValue()::value_interface\n"
+"race:v8::internal::IsMinusZero(double)::minus_zero\n"
+"race:v8::internal::FastCloneShallowObjectStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::KeyedLoadStubCompiler::registers\n"
+"race:v8::internal::KeyedStoreStubCompiler::registers()::registers\n"
+"race:v8::internal::KeyedLoadFastElementStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::KeyedStoreFastElementStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::LoadStubCompiler::registers\n"
+"race:v8::internal::StoreStubCompiler::registers\n"
+"race:v8::internal::HValue::LoopWeight\n"
+
+// http://crbug.com/334140
+"race:CommandLine::HasSwitch\n"
+"race:CommandLine::current_process_commandline_\n"
+"race:CommandLine::GetSwitchValueASCII\n"
+
+// http://crbug.com/338675
+"race:blink::s_platform\n"
+"race:content::"
+ "RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n"
+
+// http://crbug.com/345240
+"race:WTF::s_shutdown\n"
+
+// http://crbug.com/345245
+"race:jingle_glue::JingleThreadWrapper::~JingleThreadWrapper\n"
+"race:webrtc::voe::Channel::UpdatePacketDelay\n"
+"race:webrtc::voe::Channel::GetDelayEstimate\n"
+"race:webrtc::VCMCodecDataBase::DeregisterReceiveCodec\n"
+"race:webrtc::GainControlImpl::set_stream_analog_level\n"
+
+// http://crbug.com/345618
+"race:WebCore::AudioDestinationNode::render\n"
+
+// http://crbug.com/345624
+"race:media::DataSource::set_host\n"
+
+// http://crbug.com/347534
+"race:v8::internal::V8::TearDown\n"
+
+// http://crbug.com/347538
+"race:sctp_timer_start\n"
+
+// http://crbug.com/347548
+"race:cricket::WebRtcVideoMediaChannel::MaybeResetVieSendCodec\n"
+"race:cricket::WebRtcVideoMediaChannel::SetSendCodec\n"
+
+// http://crbug.com/347553
+"race:blink::WebString::reset\n"
+
+// http://crbug.com/348511
+"race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n"
+
+// http://crbug.com/348982
+"race:cricket::P2PTransportChannel::OnConnectionDestroyed\n"
+"race:cricket::P2PTransportChannel::AddConnection\n"
+
+// http://crbug.com/348984
+"race:sctp_express_handle_sack\n"
+"race:system_base_info\n"
+
+// http://crbug.com/363999
+"race:v8::internal::EnterDebugger::*EnterDebugger\n"
+
+// http://crbug.com/364006
+"race:gfx::ImageFamily::~ImageFamily\n"
+
+// http://crbug.com/364014
+"race:WTF::Latin1Encoding()::globalLatin1Encoding\n"
+
+// https://code.google.com/p/v8/issues/detail?id=3143
+"race:v8::internal::FLAG_track_double_fields\n"
+
+// https://crbug.com/369257
+// TODO(mtklein): annotate properly and remove suppressions.
+"race:SandboxIPCHandler::HandleFontMatchRequest\n"
+"race:SkFontConfigInterfaceDirect::matchFamilyName\n"
+"race:SkFontConfigInterface::GetSingletonDirectInterface\n"
+"race:FcStrStaticName\n"
+
+// http://crbug.com/372807
+"deadlock:net::X509Certificate::CreateCertificateListFromBytes\n"
+"deadlock:net::X509Certificate::CreateFromBytes\n"
+"deadlock:net::SSLClientSocketNSS::Core::DoHandshakeLoop\n"
+
+// http://crbug.com/374135
+"race:media::AlsaWrapper::PcmWritei\n"
+
+// False positive in libc's tzset_internal, http://crbug.com/379738.
+"race:tzset_internal\n"
+
+// http://crbug.com/380554
+"deadlock:g_type_add_interface_static\n"
+
+// http:://crbug.com/386385
+"race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+// http://crbug.com/388730
+"race:g_next_user_script_id\n"
+
+// http://crbug.com/389098
+"race:webrtc::RtpToNtpMs\n"
+"race:webrtc::UpdateRtcpList\n"
+"race:webrtc::RemoteNtpTimeEstimator::Estimate\n"
+"race:webrtc::voe::TransmitMixer::EnableStereoChannelSwapping\n"
+
+// http://crbug.com/397022
+"deadlock:"
+"base::debug::TraceEventTestFixture_ThreadOnceBlocking_Test::TestBody\n"
+
+// End of suppressions.
+; // Please keep this semicolon.
+
+#endif // THREAD_SANITIZER
diff --git a/build/secondary/third_party/android_tools/BUILD.gn b/build/secondary/third_party/android_tools/BUILD.gn
index 44a10d99b3..1a54e15a7a 100644
--- a/build/secondary/third_party/android_tools/BUILD.gn
+++ b/build/secondary/third_party/android_tools/BUILD.gn
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/android/rules.gni")
+
config("cpu_features_include") {
include_dirs = [ "ndk/sources/android/cpufeatures" ]
}
@@ -15,3 +17,47 @@ source_set("cpu_features") {
configs -= [ "//build/config/compiler:chromium_code" ]
configs += [ "//build/config/compiler:no_chromium_code" ]
}
+
+android_java_prebuilt("android_gcm_java") {
+ jar_path = "$android_sdk_root/extras/google/gcm/gcm-client/dist/gcm.jar"
+}
+
+android_java_prebuilt("uiautomator_java") {
+ jar_path = "$android_sdk/uiautomator.jar"
+}
+
+android_java_prebuilt("android_support_v13_java") {
+ jar_path = "$android_sdk_root/extras/android/support/v13/android-support-v13.jar"
+}
+
+android_resources("android_support_v7_appcompat_resources") {
+ v14_verify_only = true
+ resource_dirs = [
+ "$android_sdk_root/extras/android/support/v7/appcompat/res"
+ ]
+ custom_package = "android.support.v7.appcompat"
+}
+
+android_java_prebuilt("android_support_v7_appcompat_java") {
+ deps = [ ":android_support_v7_appcompat_resources" ]
+ jar_path = "$android_sdk_root/extras/android/support/v7/appcompat/libs/android-support-v7-appcompat.jar"
+}
+
+android_resources("android_support_v7_mediarouter_resources") {
+ v14_verify_only = true
+ resource_dirs = [
+ "$android_sdk_root/extras/android/support/v7/mediarouter/res"
+ ]
+ deps = [
+ ":android_support_v7_appcompat_resources",
+ ]
+ custom_package = "android.support.v7.mediarouter"
+}
+
+android_java_prebuilt("android_support_v7_mediarouter_java") {
+ deps = [
+ ":android_support_v7_mediarouter_resources",
+ ":android_support_v7_appcompat_java",
+ ]
+ jar_path = "$android_sdk_root/extras/android/support/v7/mediarouter/libs/android-support-v7-mediarouter.jar"
+}
diff --git a/build/secondary/tools/grit/grit_rule.gni b/build/secondary/tools/grit/grit_rule.gni
index e1cf53f748..bec147eebb 100644
--- a/build/secondary/tools/grit/grit_rule.gni
+++ b/build/secondary/tools/grit/grit_rule.gni
@@ -282,7 +282,11 @@ template("grit") {
args = [
"-i", source_path, "build",
- "-f", resource_ids,
+ ]
+ if (resource_ids != "") {
+ args += [ "-f", resource_ids ]
+ }
+ args += [
"-o", rebased_output_dir,
"--depdir", ".",
"--depfile", rebase_path(depfile, root_build_dir),
diff --git a/build/secondary/ui/BUILD.gn b/build/secondary/ui/BUILD.gn
deleted file mode 100644
index 05d5c22aa7..0000000000
--- a/build/secondary/ui/BUILD.gn
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-component("ui") {
- # TODO(brettw) do this target, this current stuff is only to make Chrome
- # link.
-
- configs += "//build/config/linux:x11"
- }
-}
diff --git a/build/toolchain/android/BUILD.gn b/build/toolchain/android/BUILD.gn
index c927b37f0c..2d1b69a866 100644
--- a/build/toolchain/android/BUILD.gn
+++ b/build/toolchain/android/BUILD.gn
@@ -49,10 +49,17 @@ template("android_gcc_toolchain") {
toolchain_os = "android"
toolchain_cpu_arch = invoker.toolchain_cpu_arch
+ # We make the assumption that the gcc_toolchain will produce a soname with
+ # the following definition.
+ soname = "{{target_output_name}}{{output_extension}}"
+
+ stripped_soname = "lib.stripped/${soname}.tmp"
+ temp_stripped_soname = "${stripped_soname}.tmp"
+
android_strip = "${tool_prefix}strip"
mkdir_command = "mkdir -p lib.stripped"
- strip_command = "$android_strip --strip-unneeded -o lib.stripped/\$soname.tmp \$lib"
- replace_command = "if ! cmp -s lib.stripped/\${soname}.tmp lib.stripped/\${soname}; then mv lib.stripped/\${soname}.tmp lib.stripped/\${soname}; fi"
+ strip_command = "$android_strip --strip-unneeded -o $temp_stripped_soname $soname"
+ replace_command = "if ! cmp -s $temp_stripped_soname $stripped_soname; then mv $temp_stripped_soname $stripped_soname; fi"
postsolink = "$mkdir_command && $strip_command && $replace_command"
}
}
diff --git a/build/toolchain/gcc_toolchain.gni b/build/toolchain/gcc_toolchain.gni
index 15ecb7e335..8f5445b225 100644
--- a/build/toolchain/gcc_toolchain.gni
+++ b/build/toolchain/gcc_toolchain.gni
@@ -75,64 +75,114 @@ template("gcc_toolchain") {
solink_libs_section_postfix = ""
}
- # Make these apply to all tools below.
- lib_prefix = "-l"
- lib_dir_prefix="-L"
+ # These library switches can apply to all tools below.
+ lib_switch = "-l"
+ lib_dir_switch = "-L"
tool("cc") {
- # cflags_pch_c
- command = "$cc -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_c -c \$in -o \$out"
- description = "CC \$out"
- depfile = "\$out.d"
+ depfile = "{{output}}.d"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
depsformat = "gcc"
+ description = "CC {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
}
+
tool("cxx") {
- # cflags_pch_cc
- command = "$cxx -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_cc -c \$in -o \$out"
- description = "CXX \$out"
- depfile = "\$out.d"
+ depfile = "{{output}}.d"
+ command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CXX {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
+ }
+
+ tool("asm") {
+ # For GCC we can just use the C compiler to compile assembly.
+ depfile = "{{output}}.d"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
depsformat = "gcc"
+ description = "ASM {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
}
+
tool("alink") {
- command = "rm -f \$out && $ar rcs \$out @\$rspfile"
- description = "AR \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in"
+ rspfile = "{{output}}.rsp"
+ command = "rm -f {{output}} && $ar rcs {{output}} @$rspfile"
+ description = "AR {{output}}"
+ rspfile_content = "{{inputs}}"
+ outputs = [
+ "{{target_out_dir}}/{{target_output_name}}{{output_extension}}"
+ ]
+ default_output_extension = ".a"
+ output_prefix = "lib"
}
+
tool("solink") {
- rspfile = "\$out.rsp"
- rspfile_content = "-Wl,--whole-archive \$in \$solibs -Wl,--no-whole-archive $solink_libs_section_prefix \$libs $solink_libs_section_postfix"
+ soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so".
+ rspfile = soname + ".rsp"
- # TODO(cjhopman): There needs to be a way for gn to correctly figure out
- # the outputs of a solink command.
+ # These variables are not built into GN but are helpers that implement
+ # (1) linking to produce a .so, (2) extracting the symbols from that file
+ # to a temporary file, (3) if the temporary file has differences from the
+ # existing .TOC file, overwrite it, otherwise, don't change it.
+ tocname = soname + ".TOC"
+ temporary_tocname = soname + ".tmp"
+ link_command = "$ld -shared {{ldflags}} -o $soname -Wl,-soname=$soname @$rspfile"
+ toc_command = "{ readelf -d $soname | grep SONAME ; nm -gD -f p $soname | cut -f1-2 -d' '; } > $temporary_tocname"
+ replace_command = "if ! cmp -s $temporary_tocname $tocname; then mv $temporary_tocname $tocname; fi"
- link_command = "$ld -shared \$ldflags -o \$lib -Wl,-soname=\$soname @\$rspfile"
- toc_command = "{ readelf -d \${lib} | grep SONAME ; nm -gD -f p \${lib} | cut -f1-2 -d' '; } > \${lib}.tmp"
- replace_command = "if ! cmp -s \${lib}.tmp \${lib}.TOC; then mv \${lib}.tmp \${lib}.TOC; fi"
command = "$link_command && $toc_command && $replace_command"
-
if (defined(invoker.postsolink)) {
command += " && " + invoker.postsolink
}
+ rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
- description = "SOLINK \$lib"
- #pool = "link_pool"
- restat = "1"
+ description = "SOLINK $soname"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_extension = ".so"
+
+ output_prefix = "lib"
+
+ # Since the above commands only updates the .TOC file when it changes, ask
+ # Ninja to check if the timestamp actually changed to know if downstream
+ # dependencies should be recompiled.
+ restat = true
+
+ # Tell GN about the output files. It will link to the soname but use the
+ # tocname for dependency management.
+ outputs = [
+ soname,
+ tocname,
+ ]
+ link_output = soname
+ depend_output = tocname
}
+
tool("link") {
- command = "$ld \$ldflags -o \$out -Wl,--start-group @\$rspfile \$solibs -Wl,--end-group $libs_section_prefix \$libs $libs_section_postfix"
- description = "LINK \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in"
- #pool = "link_pool"
+ outfile = "{{target_output_name}}{{output_extension}}"
+ rspfile = "$outfile.rsp"
+ command = "$ld {{ldflags}} -o $outfile -Wl,--start-group @$rspfile {{solibs}} -Wl,--end-group $libs_section_prefix {{libs}} $libs_section_postfix"
+ description = "LINK $outfile"
+ rspfile_content = "{{inputs}}"
+ outputs = [ outfile ]
}
+
tool("stamp") {
- command = "\${postbuilds}touch \$out"
- description = "STAMP \$out"
+ command = "touch {{output}}"
+ description = "STAMP {{output}}"
}
+
tool("copy") {
- command = "ln -f \$in \$out 2>/dev/null || (rm -rf \$out && cp -af \$in \$out)"
- description = "COPY \$in \$out"
+ command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+ description = "COPY {{source}} {{output}}"
}
# When invoking this toolchain not as the default one, these args will be
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn
index c43049cd88..65fb7c661b 100644
--- a/build/toolchain/mac/BUILD.gn
+++ b/build/toolchain/mac/BUILD.gn
@@ -52,71 +52,135 @@ template("mac_clang_toolchain") {
ld = invoker.ld
# Make these apply to all tools below.
- lib_prefix = "-l"
- lib_dir_prefix="-L"
+ lib_switch = "-l"
+ lib_dir_switch = "-L"
tool("cc") {
- command = "$cc -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_c \$cflags_pch_c -c \$in -o \$out"
- description = "CC \$out"
- depfile = "\$out.d"
+ depfile = "{{output}}.d"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
depsformat = "gcc"
+ description = "CC {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
}
+
tool("cxx") {
- command = "$cxx -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_cc \$cflags_pch_cc -c \$in -o \$out"
- description = "CXX \$out"
- depfile = "\$out.d"
+ depfile = "{{output}}.d"
+ command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
depsformat = "gcc"
+ description = "CXX {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
}
+
+ tool("asm") {
+ # For GCC we can just use the C compiler to compile assembly.
+ depfile = "{{output}}.d"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "ASM {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
+ }
+
tool("objc") {
- command = "$cc -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_c \$cflags_objc \$cflags_pch_objc -c \$in -o \$out"
- description = "OBJC \$out"
- depfile = "\$out.d"
+ depfile = "{{output}}.d"
+ command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} {{cflags_objc}} -c {{source}} -o {{output}}"
depsformat = "gcc"
+ description = "OBJC {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
}
+
tool("objcxx") {
- command = "$cxx -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_cc \$cflags_objcc \$cflags_pch_objcc -c \$in -o \$out"
- description = "OBJCXX \$out"
- depfile = "\$out.d"
+ depfile = "{{output}}.d"
+ command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} {{cflags_objcc}} -c {{source}} -o {{output}}"
depsformat = "gcc"
+ description = "OBJCXX {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
}
+
tool("alink") {
- command = "rm -f \$out && ./gyp-mac-tool filter-libtool libtool \$libtool_flags -static -o \$out \$in \$postbuilds"
- description = "LIBTOOL-STATIC \$out"
+ command = "rm -f {{output}} && ./gyp-mac-tool filter-libtool libtool -static -o {{output}} {{inputs}}"
+ description = "LIBTOOL-STATIC {{output}}"
+ outputs = [
+ "{{target_out_dir}}/{{target_output_name}}{{output_extension}}"
+ ]
+ default_output_extension = ".a"
+ output_prefix = "lib"
}
+
tool("solink") {
- command = "if [ ! -e \$lib -o ! -e \${lib}.TOC ] || otool -l \$lib | grep -q LC_REEXPORT_DYLIB ; then $ld -shared \$ldflags -o \$lib -Wl,-filelist,\$rspfile \$solibs \$libs \$postbuilds && { otool -l \$lib | grep LC_ID_DYLIB -A 5; nm -gP \$lib | cut -f1-2 -d' ' | grep -v U\$\$; true; } > \${lib}.TOC; else $ld -shared \$ldflags -o \$lib \$in \$solibs \$libs \$postbuilds && { otool -l \$lib | grep LC_ID_DYLIB -A 5; nm -gP \$lib | cut -f1-2 -d' ' | grep -v U\$\$; true; } > \${lib}.tmp && if ! cmp -s \${lib}.tmp \${lib}.TOC; then mv \${lib}.tmp \${lib}.TOC ; fi; fi"
- description = "SOLINK \$lib"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in_newline"
- #pool = "link_pool"
- restat = "1"
+ dylib = "{{target_output_name}}{{output_extension}}" # eg "libfoo.dylib"
+ rspfile = dylib + ".rsp"
+
+ # These variables are not build into GN but are helpers that implement
+ # (1) linking to produce a .so, (2) extracting the symbols from that file
+ # to a temporary file, (3) if the temporary file has differences from the
+ # existing .TOC file, overwrite it, oterwise, don't change it.
+ #
+ # As a special case, if the library reexports symbols from other dynamic
+ # libraries, we always update the .TOC and skip the temporary file and
+ # diffing steps, since that library always needs to be re-linked.
+ tocname = dylib + ".TOC"
+ temporary_tocname = dylib + ".tmp"
+
+ does_reexport_command = "[ ! -e $dylib -o ! -e $tocname ] || otool -l $dylib | grep -q LC_REEXPORT_DYLIB"
+ link_command = "$ld -shared {{ldflags}} -o $dylib -Wl,-filelist,$rspfile {{solibs}} {{libs}}"
+ replace_command = "if ! cmp -s $temporary_tocname $tocname; then mv $temporary_tocname $tocname"
+ extract_toc_command = "{ otool -l $dylib | grep LC_ID_DYLIB -A 5; nm -gP $dylib | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
+
+ command = "if $does_reexport_command ; then $link_command && $extract_toc_command > $tocname; else $link_command && $extract_toc_command > $temporary_tocname && $replace_command ; fi; fi"
+
+ rspfile_content = "{{inputs_newline}}"
+
+ description = "SOLINK {{output}}"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_extension = ".dylib"
+
+ output_prefix = "lib"
+
+ # Since the above commands only updates the .TOC file when it changes, ask
+ # Ninja to check if the timestamp actually changed to know if downstream
+ # dependencies should be recompiled.
+ restat = true
+
+ # Tell GN about the output files. It will link to the dylib but use the
+ # tocname for dependency management.
+ outputs = [
+ dylib,
+ tocname,
+ ]
+ link_output = dylib
+ depend_output = tocname
}
+
tool("link") {
- command = "$ld \$ldflags -o \$out -Wl,-filelist,\$rspfile \$solibs \$libs \$postbuilds"
- description = "LINK \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in_newline"
- #pool = "link_pool"
+ outfile = "{{target_output_name}}{{output_extension}}"
+ rspfile = "$outfile.rsp"
+ command = "$ld {{ldflags}} -o $outfile -Wl,-filelist,$rspfile {{solibs}} {{libs}}"
+ description = "LINK $outfile"
+ rspfile_content = "{{inputs_newline}}"
+ outputs = [ outfile ]
}
- #tool("infoplist") {
- # command = "$cc -E -P -Wno-trigraphs -x c \$defines \$in -o \$out && plutil -convert xml1 \$out \$out"
- # description = "INFOPLIST \$out"
- #}
- #tool("mac_tool") {
- # command = "\$env ./gyp-mac-tool \$mactool_cmd \$in \$out"
- # description = "MACTOOL \$mactool_cmd \$in"
- #}
- #tool("package_framework") {
- # command = "./gyp-mac-tool package-framework \$out \$version \$postbuilds && touch \$out"
- # description = "PACKAGE FRAMEWORK \$out, POSTBUILDS"
- #}
+
tool("stamp") {
- command = "\${postbuilds}touch \$out"
- description = "STAMP \$out"
+ command = "touch {{output}}"
+ description = "STAMP {{output}}"
}
+
tool("copy") {
- command = "ln -f \$in \$out 2>/dev/null || (rm -rf \$out && cp -af \$in \$out)"
- description = "COPY \$in \$out"
+ command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+ description = "COPY {{source}} {{output}}"
}
toolchain_args() {
diff --git a/build/toolchain/win/BUILD.gn b/build/toolchain/win/BUILD.gn
index 3006d5bdb4..0a5cf87ad4 100644
--- a/build/toolchain/win/BUILD.gn
+++ b/build/toolchain/win/BUILD.gn
@@ -18,137 +18,148 @@ gyp_win_tool_path = rebase_path("//tools/gyp/pylib/gyp/win_tool.py",
exec_script("setup_toolchain.py",
[ visual_studio_path, gyp_win_tool_path, windows_sdk_path ])
-stamp_command = "$python_path gyp-win-tool stamp \$out"
-copy_command = "$python_path gyp-win-tool recursive-mirror \$in \$out"
-
-# MSVC can't share PDB files between compilers compiling C and C++ files, so
-# we construct different names for each type.
-c_pdb_suffix = " /Fd\${target_out_dir}/\${target_name}_c.pdb"
-cc_pdb_suffix = " /Fd\${target_out_dir}/\${target_name}_cc.pdb"
-
-# 32-bit toolchain -------------------------------------------------------------
-
-toolchain("32") {
- # Make these apply to all tools below.
- lib_prefix = ""
- lib_dir_prefix="/LIBPATH:"
-
- cc_command = "ninja -t msvc -e environment.x86 -- cl.exe /nologo /showIncludes /FC @\$out.rsp /c \$in /Fo\$out"
- tool("cc") {
- command = cc_command + c_pdb_suffix
- description = "CC \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$defines \$includes \$cflags \$cflags_c"
- depsformat = "msvc"
- }
- tool("cxx") {
- command = cc_command + cc_pdb_suffix
- description = "CXX \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$defines \$includes \$cflags \$cflags_cc"
- depsformat = "msvc"
- }
- tool("rc") {
- command = "$python_path gyp-win-tool rc-wrapper environment.x86 rc.exe \$defines \$includes \$rcflags /fo\$out \$in"
- description = "RC \$in"
- }
- tool("asm") {
- command = "$python_path gyp-win-tool asm-wrapper environment.x86 ml.exe \$defines \$includes /c /Fo \$out \$in"
- description = "ASM \$in"
- }
- tool("alink") {
- command = "$python_path gyp-win-tool link-wrapper environment.x86 False lib.exe /nologo /ignore:4221 /OUT:\$out @\$out.rsp"
- description = "LIB \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in_newline \$libflags"
- }
- tool("solink") {
- command = "cmd /c $python_path gyp-win-tool link-wrapper environment.x86 False link.exe /nologo \$implibflag /DLL /OUT:\$dll /PDB:\$dll.pdb @\$dll.rsp && $python_path gyp-win-tool manifest-wrapper environment.x86 mt.exe -nologo -manifest \$manifests -out:\$dll.manifest"
- description = "LINK(DLL) \$dll"
- restat = "1"
- rspfile = "\$dll.rsp"
- rspfile_content = "\$libs \$in_newline \$ldflags"
- }
- tool("link") {
- command = "cmd /c $python_path gyp-win-tool link-wrapper environment.x86 False link.exe /nologo /OUT:\$out /PDB:\$out.pdb @\$out.rsp && $python_path gyp-win-tool manifest-wrapper environment.x86 mt.exe -nologo -manifest \$manifests -out:\$out.manifest"
- description = "LINK \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in_newline \$libs \$ldflags"
- }
- tool("stamp") {
- command = stamp_command
- description = "STAMP \$out"
- }
- tool("copy") {
- command = copy_command
- description = "COPY \$in \$out"
- }
-}
+# Parameters:
+# cpu_arch: cpu_arch to pass as a build arg
+# environment: File name of environment file.
+# force_win64 (optional): value for this build arg.
+template("msvc_toolchain") {
+ env = invoker.environment
-# 64-bit toolchain -------------------------------------------------------------
+ toolchain(target_name) {
+ # Make these apply to all tools below.
+ lib_switch = ""
+ lib_dir_switch="/LIBPATH:"
-toolchain("64") {
- # Make these apply to all tools below.
- lib_prefix = ""
- lib_dir_prefix="/LIBPATH:"
+ tool("cc") {
+ rspfile = "{{output}}.rsp"
+ pdbname = "{{target_out_dir}}/{{target_output_name}}_c.pdb"
+ command = "ninja -t msvc -e $env -- cl.exe /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd$pdbname"
+ depsformat = "msvc"
+ description = "CC {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.obj",
+ ]
+ rspfile_content = "{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}"
+ }
- cc_command = "ninja -t msvc -e environment.x64 -- cl.exe /nologo /showIncludes /FC @\$out.rsp /c \$in /Fo\$out"
- tool("cc") {
- command = cc_command + c_pdb_suffix
- description = "CC \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$defines \$includes \$cflags \$cflags_c"
- depsformat = "msvc"
- }
- tool("cxx") {
- command = cc_command + cc_pdb_suffix
- description = "CXX \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$defines \$includes \$cflags \$cflags_cc"
- depsformat = "msvc"
- }
- tool("rc") {
- command = "$python_path gyp-win-tool rc-wrapper environment.x64 rc.exe \$defines \$includes \$rcflags /fo\$out \$in"
- description = "RC \$in"
- }
- tool("asm") {
- command = "$python_path gyp-win-tool asm-wrapper environment.x64 ml.exe \$defines \$includes /c /Fo \$out \$in"
- description = "ASM \$in"
- }
- tool("alink") {
- command = "$python_path gyp-win-tool link-wrapper environment.x64 False lib.exe /nologo /ignore:4221 /OUT:\$out @\$out.rsp"
- description = "LIB \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in_newline \$libflags"
- }
- tool("solink") {
- command = "cmd /c $python_path gyp-win-tool link-wrapper environment.x64 False link.exe /nologo \$implibflag /DLL /OUT:\$dll /PDB:\$dll.pdb @\$dll.rsp && $python_path gyp-win-tool manifest-wrapper environment.x64 mt.exe -nologo -manifest \$manifests -out:\$dll.manifest"
- description = "LINK(DLL) \$dll"
- restat = "1"
- rspfile = "\$dll.rsp"
- rspfile_content = "\$libs \$in_newline \$ldflags"
- }
- tool("link") {
- command = "cmd /c $python_path gyp-win-tool link-wrapper environment.x64 False link.exe /nologo /OUT:\$out /PDB:\$out.pdb @\$out.rsp && $python_path gyp-win-tool manifest-wrapper environment.x64 mt.exe -nologo -manifest \$manifests -out:\$out.manifest"
- description = "LINK \$out"
- rspfile = "\$out.rsp"
- rspfile_content = "\$in_newline \$libs \$ldflags"
- }
- tool("stamp") {
- command = stamp_command
- description = "STAMP \$out"
- }
- tool("copy") {
- command = copy_command
- description = "COPY \$in \$out"
- }
+ tool("cxx") {
+ rspfile = "{{output}}.rsp"
+ # The PDB name needs to be different between C and C++ compiled files.
+ pdbname = "{{target_out_dir}}/{{target_output_name}}_cc.pdb"
+ command = "ninja -t msvc -e $env -- cl.exe /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd$pdbname"
+ depsformat = "msvc"
+ description = "CXX {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.obj",
+ ]
+ rspfile_content = "{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}"
+ }
- # When invoking this toolchain not as the default one, these args will be
- # passed to the build. They are ignored when this is the default toolchain.
- toolchain_args() {
- cpu_arch = "x64"
- # Normally the build config resets the CPU architecture to 32-bits. Setting
- # this flag overrides that behavior.
- force_win64 = true
+ tool("rc") {
+ command = "$python_path gyp-win-tool rc-wrapper $env rc.exe {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.res",
+ ]
+ description = "RC {{output}}"
+ }
+
+ tool("asm") {
+ command = "$python_path gyp-win-tool asm-wrapper $env ml.exe {{defines}} {{include_dirs}} /c /Fo {{output}} {{source}}"
+ description = "ASM {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.obj",
+ ]
+ }
+
+ tool("alink") {
+ rspfile = "{{output}}.rsp"
+ command = "$python_path gyp-win-tool link-wrapper $env False lib.exe /nologo /ignore:4221 /OUT:{{output}} @$rspfile"
+ description = "LIB {{output}}"
+ outputs = [
+ # Ignore {{output_extension}} and always use .lib, there's no reason to
+ # allow targets to override this extension on Windows.
+ "{{target_out_dir}}/{{target_output_name}}.lib",
+ ]
+ default_output_extension = ".lib"
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content = "{{inputs_newline}}"
+ }
+
+ tool("solink") {
+ dllname = "{{target_output_name}}{{output_extension}}" # e.g. foo.dll
+ libname = "{{target_output_name}}{{output_extension}}.lib" # e.g. foo.dll.lib
+ rspfile = "${dllname}.rsp"
+
+ link_command = "$python_path gyp-win-tool link-wrapper $env False link.exe /nologo /IMPLIB:$libname /DLL /OUT:$dllname /PDB:${dllname}.pdb @$rspfile"
+
+ # TODO(brettw) support manifests
+ #manifest_command = "$python_path gyp-win-tool manifest-wrapper $env mt.exe -nologo -manifest $manifests -out:${dllname}.manifest"
+ #command = "cmd /c $link_command && $manifest_command"
+ command = link_command
+
+ default_output_extension = ".dll"
+ description = "LINK(DLL) {{output}}"
+ outputs = [
+ dllname,
+ libname,
+ ]
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+ }
+
+ tool("link") {
+ rspfile = "{{output}}.rsp"
+
+ link_command = "$python_path gyp-win-tool link-wrapper $env False link.exe /nologo /OUT:{{output}} /PDB:{{output}}.pdb @$rspfile"
+
+ # TODO(brettw) support manifests
+ #manifest_command = "$python_path gyp-win-tool manifest-wrapper $env mt.exe -nologo -manifest $manifests -out:{{output}}.manifest"
+ #command = "cmd /c $link_command && $manifest_command"
+ command = link_command
+
+ default_output_extension = ".exe"
+ description = "LINK {{output}}"
+ outputs = [
+ "{{target_output_name}}{{output_extension}}",
+ ]
+ # The use of inputs_newline is to work around a fixed per-line buffer
+ # size in the linker.
+ rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
+ }
+
+ tool("stamp") {
+ command = "$python_path gyp-win-tool stamp {{output}}"
+ description = "STAMP {{output}}"
+ }
+
+ tool("copy") {
+ command = "$python_path gyp-win-tool recursive-mirror {{source}} {{output}}"
+ description = "COPY {{source}} {{output}}"
+ }
+
+ # When invoking this toolchain not as the default one, these args will be
+ # passed to the build. They are ignored when this is the default toolchain.
+ toolchain_args() {
+ cpu_arch = invoker.cpu_arch
+
+ # Normally the build config resets the CPU architecture to 32-bits. Setting
+ # this flag overrides that behavior.
+ if (defined(invoker.force_win64)) {
+ force_win64 = invoker.force_win64
+ }
+ }
}
}
+
+msvc_toolchain("32") {
+ environment = "environment.x86"
+ cpu_arch = "x64"
+}
+
+msvc_toolchain("64") {
+ environment = "environment.x64"
+ cpu_arch = "x64"
+ force_win64 = true
+}
diff --git a/build/util/LASTCHANGE b/build/util/LASTCHANGE
index fd5e3f1445..a56ba749e0 100644
--- a/build/util/LASTCHANGE
+++ b/build/util/LASTCHANGE
@@ -1 +1 @@
-LASTCHANGE=290040
+LASTCHANGE=291560
diff --git a/build/util/LASTCHANGE.blink b/build/util/LASTCHANGE.blink
index deef9f97a6..e5b01e67b1 100644
--- a/build/util/LASTCHANGE.blink
+++ b/build/util/LASTCHANGE.blink
@@ -1 +1 @@
-LASTCHANGE=180365
+LASTCHANGE=180790
diff --git a/build/util/lastchange.py b/build/util/lastchange.py
index 8d758d2a24..28a266ddaf 100755
--- a/build/util/lastchange.py
+++ b/build/util/lastchange.py
@@ -99,12 +99,25 @@ def FetchGitRevision(directory):
Returns:
A VersionInfo object or None on error.
"""
+ hsh = ''
proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
if proc:
output = proc.communicate()[0].strip()
if proc.returncode == 0 and output:
- return VersionInfo('git', output[:7])
- return None
+ hsh = output
+ if not hsh:
+ return None
+ pos = ''
+ proc = RunGitCommand(directory, ['show', '-s', '--format=%B', 'HEAD'])
+ if proc:
+ output = proc.communicate()[0]
+ if proc.returncode == 0 and output:
+ for line in reversed(output.splitlines()):
+ if line.startswith('Cr-Commit-Position:'):
+ pos = line.rsplit()[-1].strip()
+ if not pos:
+ return VersionInfo('git', hsh)
+ return VersionInfo('git', '%s-%s' % (hsh, pos))
def FetchGitSVNURLAndRevision(directory, svn_url_regex):
@@ -116,8 +129,7 @@ def FetchGitSVNURLAndRevision(directory, svn_url_regex):
Returns:
A tuple containing the Subversion URL and revision.
"""
- proc = RunGitCommand(directory, ['log', '-1',
- '--grep=git-svn-id', '--format=%b'])
+ proc = RunGitCommand(directory, ['log', '-1', '--format=%b'])
if proc:
output = proc.communicate()[0].strip()
if proc.returncode == 0 and output:
diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt
index b57552708a..aac2b5ff1d 100644
--- a/build/whitespace_file.txt
+++ b/build/whitespace_file.txt
@@ -87,6 +87,7 @@ I can feel the heat closing in, feel them out there making their moves...
What could possibly go wrong? We've already ate our cake.
Stand Still. Pause Clocks. We can make the World Stop.
+WUBWUBWUBWUBWUB
I want a 1917 build and you will give me what I want.
@@ -128,6 +129,5 @@ their outdated binaries. In clobberus, veritas.
As the git approaches, light begins to shine through the SCM thrice again...
However, the git, is, after all, quite stupid.
-*
-**
-***
+
+Suddenly Domo-Kun found itself in a room filled with dazzling mirrors.