author     Torne (Richard Coles) <torne@google.com>  2014-01-02 13:44:52 +0000
committer  Torne (Richard Coles) <torne@google.com>  2014-01-02 13:44:52 +0000
commit     f5fb80afb259f33357c7a90d78d9398904e036c1 (patch)
tree       3bd0f185161e0290d088babf43ef3bd2566975fd
parent     61a3675259f54120dc9edfce3f2bc6dc5c95a90e (diff)
parent     c6fc4f9c8822be15521d3093341a8e2b5f419808 (diff)
download   gyp-kitkat-mr2.2-release.tar.gz
This commit was generated by merge_to_master.py. Change-Id: I2ecca578cdf5afd35db75ada0a7cd79b940541ca
-rw-r--r--  pylib/gyp/common.py | 8
-rw-r--r--  pylib/gyp/generator/android.py | 4
-rw-r--r--  pylib/gyp/generator/cmake.py | 9
-rw-r--r--  pylib/gyp/generator/eclipse.py | 6
-rw-r--r--  pylib/gyp/generator/make.py | 12
-rw-r--r--  pylib/gyp/generator/msvs.py | 116
-rw-r--r--  pylib/gyp/generator/ninja.py | 87
-rw-r--r--  pylib/gyp/input.py | 3
-rw-r--r--  pylib/gyp/msvs_emulation.py | 73
-rw-r--r--  pylib/gyp/ordered_dict.py | 266
-rwxr-xr-x  pylib/gyp/win_tool.py | 77
-rw-r--r--  pylib/gyp/xcode_emulation.py | 35
-rwxr-xr-x  test/mac/gyptest-app.py | 15
-rw-r--r--  test/mac/gyptest-sdkroot.py | 37
-rwxr-xr-x  test/mac/gyptest-xcode-env-order.py | 13
-rw-r--r--  test/mac/gyptest-xcode-gcc.py | 18
-rwxr-xr-x  test/mac/sdkroot/test_shorthand.sh | 12
-rw-r--r--  test/mac/xcode-gcc/test.gyp | 5
-rw-r--r--  test/win/gyptest-link-generate-manifest.py | 8
-rw-r--r--  test/win/gyptest-link-ordering.py | 75
-rw-r--r--  test/win/gyptest-link-unsupported-manifest.py | 27
-rw-r--r--  test/win/gyptest-link-update-manifest.py | 103
-rw-r--r--  test/win/linker-flags/a/z.cc | 7
-rw-r--r--  test/win/linker-flags/b/y.cc | 7
-rw-r--r--  test/win/linker-flags/generate-manifest.gyp | 10
-rw-r--r--  test/win/linker-flags/link-ordering.gyp | 65
-rw-r--r--  test/win/linker-flags/main-crt.c | 8
-rw-r--r--  test/win/linker-flags/manifest-in-comment.cc | 13
-rw-r--r--  test/win/linker-flags/unsupported-manifest.gyp | 13
-rw-r--r--  test/win/linker-flags/x.cc | 7
-rw-r--r--  test/win/linker-flags/y.cc | 7
-rw-r--r--  test/win/linker-flags/z.cc | 7
32 files changed, 985 insertions, 168 deletions
diff --git a/pylib/gyp/common.py b/pylib/gyp/common.py
index b9d2abef..f9c6c6f3 100644
--- a/pylib/gyp/common.py
+++ b/pylib/gyp/common.py
@@ -391,6 +391,14 @@ def WriteOnDiff(filename):
return Writer()
+def EnsureDirExists(path):
+ """Make sure the directory for |path| exists."""
+ try:
+ os.makedirs(os.path.dirname(path))
+ except OSError:
+ pass
+
+
def GetFlavor(params):
"""Returns |params.flavor| if it's set, the system's default flavor else."""
flavors = {
diff --git a/pylib/gyp/generator/android.py b/pylib/gyp/generator/android.py
index 63036bb2..41346e2b 100644
--- a/pylib/gyp/generator/android.py
+++ b/pylib/gyp/generator/android.py
@@ -145,7 +145,7 @@ class AndroidMkWriter(object):
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
"""
- make.ensure_directory_exists(output_filename)
+ gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
@@ -983,7 +983,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
makefile_path = os.path.join(options.toplevel_dir, makefile_name)
assert not options.generator_output, (
'The Android backend does not support options.generator_output.')
- make.ensure_directory_exists(makefile_path)
+ gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, 'w')
root_makefile.write(header)
diff --git a/pylib/gyp/generator/cmake.py b/pylib/gyp/generator/cmake.py
index 16118999..10d015ee 100644
--- a/pylib/gyp/generator/cmake.py
+++ b/pylib/gyp/generator/cmake.py
@@ -118,13 +118,6 @@ def NormjoinPath(base_path, rel_path):
return os.path.normpath(os.path.join(base_path, rel_path))
-def EnsureDirectoryExists(path):
- """Python version of 'mkdir -p'."""
- dirPath = os.path.dirname(path)
- if dirPath and not os.path.exists(dirPath):
- os.makedirs(dirPath)
-
-
def CMakeStringEscape(a):
"""Escapes the string 'a' for use inside a CMake string.
@@ -1041,7 +1034,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
- EnsureDirectoryExists(output_file)
+ gyp.common.EnsureDirExists(output_file)
output = open(output_file, 'w')
output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
diff --git a/pylib/gyp/generator/eclipse.py b/pylib/gyp/generator/eclipse.py
index a80edc89..84380b04 100644
--- a/pylib/gyp/generator/eclipse.py
+++ b/pylib/gyp/generator/eclipse.py
@@ -270,9 +270,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
os.path.join(toplevel_build, 'gen')]
- if not os.path.exists(toplevel_build):
- os.makedirs(toplevel_build)
- out = open(os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), 'w')
+ out_name = os.path.join(toplevel_build, 'eclipse-cdt-settings.xml')
+ gyp.common.EnsureDirExists(out_name)
+ out = open(out_name, 'w')
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
out.write('<cdtprojectproperties>\n')
diff --git a/pylib/gyp/generator/make.py b/pylib/gyp/generator/make.py
index d4078433..5cf67632 100644
--- a/pylib/gyp/generator/make.py
+++ b/pylib/gyp/generator/make.py
@@ -117,12 +117,6 @@ def CalculateGeneratorInputInfo(params):
}
-def ensure_directory_exists(path):
- dir = os.path.dirname(path)
- if dir and not os.path.exists(dir):
- os.makedirs(dir)
-
-
# The .d checking code below uses these functions:
# wildcard, sort, foreach, shell, wordlist
# wildcard can handle spaces, the rest can't.
@@ -691,7 +685,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
"""
- ensure_directory_exists(output_filename)
+ gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
@@ -820,7 +814,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
targets: list of "all" targets for this sub-project
build_dir: build output directory, relative to the sub-project
"""
- ensure_directory_exists(output_filename)
+ gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
# For consistency with other builders, put sub-project build output in the
@@ -2067,7 +2061,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
header_params['make_global_settings'] = make_global_settings
- ensure_directory_exists(makefile_path)
+ gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, 'w')
root_makefile.write(SHARED_HEADER % header_params)
# Currently any versions have the same effect, but in future the behavior
diff --git a/pylib/gyp/generator/msvs.py b/pylib/gyp/generator/msvs.py
index 0287eb19..81cc6dd9 100644
--- a/pylib/gyp/generator/msvs.py
+++ b/pylib/gyp/generator/msvs.py
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import collections
import copy
import ntpath
import os
@@ -21,6 +22,16 @@ import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
+# TODO: Remove once bots are on 2.7, http://crbug.com/241769
+def _import_OrderedDict():
+ import collections
+ try:
+ return collections.OrderedDict
+ except AttributeError:
+ import ordered_dict
+ return ordered_dict.OrderedDict
+OrderedDict = _import_OrderedDict()
+
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
@@ -86,6 +97,46 @@ cached_username = None
cached_domain = None
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(collections.MutableSet):
+ def __init__(self, iterable=None):
+ self.end = end = []
+ end += [None, end, end] # sentinel node for doubly linked list
+ self.map = {} # key --> [key, prev, next]
+ if iterable is not None:
+ self |= iterable
+
+ def __len__(self):
+ return len(self.map)
+
+ def discard(self, key):
+ if key in self.map:
+ key, prev, next = self.map.pop(key)
+ prev[2] = next
+ next[1] = prev
+
+ def __contains__(self, key):
+ return key in self.map
+
+ def add(self, key):
+ if key not in self.map:
+ end = self.end
+ curr = end[1]
+ curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+ def update(self, iterable):
+ for i in iterable:
+ if i not in self:
+ self.add(i)
+
+ def __iter__(self):
+ end = self.end
+ curr = end[2]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[2]
+
+
# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
@@ -179,7 +230,7 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
if not prefix: prefix = []
result = []
excluded_result = []
- folders = dict()
+ folders = OrderedDict()
# Gather files into the final result, excluded, or folders.
for s in sources:
if len(s) == 1:
@@ -415,13 +466,13 @@ def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
dicts describing the actions attached to that input file.
"""
for primary_input in actions_dict:
- inputs = set()
- outputs = set()
+ inputs = OrderedSet()
+ outputs = OrderedSet()
descriptions = []
commands = []
for action in actions_dict[primary_input]:
- inputs.update(set(action['inputs']))
- outputs.update(set(action['outputs']))
+ inputs.update(OrderedSet(action['inputs']))
+ outputs.update(OrderedSet(action['outputs']))
descriptions.append(action['description'])
commands.append(action['command'])
# Add the custom build step for one input file.
@@ -477,8 +528,8 @@ def _RuleInputsAndOutputs(rule, trigger_file):
"""
raw_inputs = _FixPaths(rule.get('inputs', []))
raw_outputs = _FixPaths(rule.get('outputs', []))
- inputs = set()
- outputs = set()
+ inputs = OrderedSet()
+ outputs = OrderedSet()
inputs.add(trigger_file)
for i in raw_inputs:
inputs.add(_RuleExpandPath(i, trigger_file))
@@ -549,16 +600,16 @@ def _GenerateExternalRules(rules, output_dir, spec,
mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
# Gather stuff needed to emit all: target.
- all_inputs = set()
- all_outputs = set()
- all_output_dirs = set()
+ all_inputs = OrderedSet()
+ all_outputs = OrderedSet()
+ all_output_dirs = OrderedSet()
first_outputs = []
for rule in rules:
trigger_files = _FindRuleTriggerFiles(rule, sources)
for tf in trigger_files:
inputs, outputs = _RuleInputsAndOutputs(rule, tf)
- all_inputs.update(set(inputs))
- all_outputs.update(set(outputs))
+ all_inputs.update(OrderedSet(inputs))
+ all_outputs.update(OrderedSet(outputs))
# Only use one target from each rule as the dependency for
# 'all' so we don't try to build each rule multiple times.
first_outputs.append(list(outputs)[0])
@@ -799,8 +850,8 @@ def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
trigger_files = _FindRuleTriggerFiles(rule, sources)
for trigger_file in trigger_files:
inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
- inputs = set(_FixPaths(inputs))
- outputs = set(_FixPaths(outputs))
+ inputs = OrderedSet(_FixPaths(inputs))
+ outputs = OrderedSet(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
if not spec.get('msvs_external_builder'):
@@ -817,7 +868,7 @@ def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
Returns:
excluded_sources with files that have actions attached removed.
"""
- must_keep = set(_FixPaths(actions_to_add.keys()))
+ must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
return [s for s in excluded_sources if s not in must_keep]
@@ -900,9 +951,7 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
generator_flags: dict of generator-specific flags.
"""
spec = project.spec
- vcproj_dir = os.path.dirname(project.path)
- if vcproj_dir and not os.path.exists(vcproj_dir):
- os.makedirs(vcproj_dir)
+ gyp.common.EnsureDirExists(project.path)
platforms = _GetUniquePlatforms(spec)
p = MSVSProject.Writer(project.path, version, spec['target_name'],
@@ -965,7 +1014,7 @@ def _GetUniquePlatforms(spec):
The MSVSUserFile object created.
"""
# Gather list of unique platforms.
- platforms = set()
+ platforms = OrderedSet()
for configuration in spec['configurations']:
platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
platforms = list(platforms)
@@ -1152,7 +1201,7 @@ def _GetLibraries(spec):
# in libraries that are assumed to be in the default library path).
# Also remove duplicate entries, leaving only the last duplicate, while
# preserving order.
- found = set()
+ found = OrderedSet()
unique_libraries_list = []
for entry in reversed(libraries):
library = re.sub('^\-l', '', entry)
@@ -1331,8 +1380,7 @@ def _GetMSVSAttributes(spec, config, config_type):
def _AddNormalizedSources(sources_set, sources_array):
- sources = [_NormalizedSource(s) for s in sources_array]
- sources_set.update(set(sources))
+ sources_set.update(_NormalizedSource(s) for s in sources_array)
def _PrepareListOfSources(spec, generator_flags, gyp_file):
@@ -1350,9 +1398,9 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
A pair of (list of sources, list of excluded sources).
The sources will be relative to the gyp file.
"""
- sources = set()
+ sources = OrderedSet()
_AddNormalizedSources(sources, spec.get('sources', []))
- excluded_sources = set()
+ excluded_sources = OrderedSet()
# Add in the gyp file.
if not generator_flags.get('standalone'):
sources.add(gyp_file)
@@ -1362,7 +1410,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
inputs = a['inputs']
inputs = [_NormalizedSource(i) for i in inputs]
# Add all inputs to sources and excluded sources.
- inputs = set(inputs)
+ inputs = OrderedSet(inputs)
sources.update(inputs)
if not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
@@ -1391,7 +1439,7 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
path of excluded IDL file)
"""
# Exclude excluded sources coming into the generator.
- excluded_sources.update(set(spec.get('sources_excluded', [])))
+ excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
# Add excluded sources into sources for good measure.
sources.update(excluded_sources)
# Convert to proper windows form.
@@ -1484,7 +1532,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
def _AddToolFilesToMSVS(p, spec):
# Add in tool files (rules).
- tool_files = set()
+ tool_files = OrderedSet()
for _, config in spec['configurations'].iteritems():
for f in config.get('msvs_tool_files', []):
tool_files.add(f)
@@ -3056,9 +3104,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
spec = project.spec
configurations = spec['configurations']
project_dir, project_file_name = os.path.split(project.path)
- msbuildproj_dir = os.path.dirname(project.path)
- if msbuildproj_dir and not os.path.exists(msbuildproj_dir):
- os.makedirs(msbuildproj_dir)
+ gyp.common.EnsureDirExists(project.path)
# Prepare list of sources and excluded sources.
gyp_path = _NormalizedSource(project.build_file)
relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
@@ -3207,16 +3253,16 @@ def _GenerateActionsForMSBuild(spec, actions_to_add):
Returns:
A pair of (action specification, the sources handled by this action).
"""
- sources_handled_by_action = set()
+ sources_handled_by_action = OrderedSet()
actions_spec = []
for primary_input, actions in actions_to_add.iteritems():
- inputs = set()
- outputs = set()
+ inputs = OrderedSet()
+ outputs = OrderedSet()
descriptions = []
commands = []
for action in actions:
- inputs.update(set(action['inputs']))
- outputs.update(set(action['outputs']))
+ inputs.update(OrderedSet(action['inputs']))
+ outputs.update(OrderedSet(action['outputs']))
descriptions.append(action['description'])
cmd = action['command']
# For most actions, add 'call' so that actions that invoke batch files
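
For context on the set() → OrderedSet and dict() → OrderedDict switches above: both containers preserve insertion order, which helps keep the generated MSVS project files ordered deterministically across regenerations. A minimal sketch (not part of the patch), assuming the OrderedSet class added to msvs.py above is available:

    inputs = OrderedSet()
    inputs.update(['z.cc', 'a.cc', 'm.cc'])   # added in this order
    inputs.add('a.cc')                        # duplicate, ignored
    print(list(inputs))                       # ['z.cc', 'a.cc', 'm.cc'] -- insertion order kept
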
diff --git a/pylib/gyp/generator/ninja.py b/pylib/gyp/generator/ninja.py
index a40c7fe2..d3db2c88 100644
--- a/pylib/gyp/generator/ninja.py
+++ b/pylib/gyp/generator/ninja.py
@@ -1039,13 +1039,18 @@ class NinjaWriter:
elif self.flavor == 'win':
manifest_name = self.GypPathToUniqueOutput(
self.ComputeOutputFileName(spec))
- ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name,
- self.GypPathToNinja, self.ExpandSpecial, manifest_name, is_executable)
+ ldflags, intermediate_manifest, manifest_files = \
+ self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
+ self.ExpandSpecial, manifest_name,
+ is_executable, self.toplevel_build)
ldflags = env_ldflags + ldflags
self.WriteVariableList(ninja_file, 'manifests', manifest_files)
+ implicit_deps = implicit_deps.union(manifest_files)
+ if intermediate_manifest:
+ self.WriteVariableList(
+ ninja_file, 'intermediatemanifest', [intermediate_manifest])
command_suffix = _GetWinLinkRuleNameSuffix(
- self.msvs_settings.IsEmbedManifest(config_name),
- self.msvs_settings.IsLinkIncremental(config_name))
+ self.msvs_settings.IsEmbedManifest(config_name))
def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
if def_file:
implicit_deps.add(def_file)
@@ -1505,10 +1510,7 @@ def CalculateGeneratorInputInfo(params):
def OpenOutput(path, mode='w'):
"""Open |path| for writing, creating directories if necessary."""
- try:
- os.makedirs(os.path.dirname(path))
- except OSError:
- pass
+ gyp.common.EnsureDirExists(path)
return open(path, mode)
@@ -1567,63 +1569,28 @@ def GetDefaultConcurrentLinks():
return 1
-def _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental):
+def _GetWinLinkRuleNameSuffix(embed_manifest):
"""Returns the suffix used to select an appropriate linking rule depending on
- whether the manifest embedding and/or incremental linking is enabled."""
- suffix = ''
- if embed_manifest:
- suffix += '_embed'
- if link_incremental:
- suffix += '_inc'
- return suffix
+ whether the manifest embedding is enabled."""
+ return '_embed' if embed_manifest else ''
-def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental):
+def _AddWinLinkRules(master_ninja, embed_manifest):
"""Adds link rules for Windows platform to |master_ninja|."""
def FullLinkCommand(ldcmd, out, binary_type):
- """Returns a one-liner written for cmd.exe to handle multiphase linker
- operations including manifest file generation. The command will be
- structured as follows:
- cmd /c (linkcmd1 a b) && (linkcmd2 x y) && ... &&
- if not "$manifests"=="" ((manifestcmd1 a b) && (manifestcmd2 x y) && ... )
- Note that $manifests becomes empty when no manifest file is generated."""
- link_commands = ['%(ldcmd)s',
- 'if exist %(out)s.manifest del %(out)s.manifest']
- mt_cmd = ('%(python)s gyp-win-tool manifest-wrapper'
- ' $arch $mt -nologo -manifest $manifests')
- if embed_manifest and not link_incremental:
- # Embed manifest into a binary. If incremental linking is enabled,
- # embedding is postponed to the re-linking stage (see below).
- mt_cmd += ' -outputresource:%(out)s;%(resname)s'
- else:
- # Save manifest as an external file.
- mt_cmd += ' -out:%(out)s.manifest'
- manifest_commands = [mt_cmd]
- if link_incremental:
- # There is no point in generating separate rule for the case when
- # incremental linking is enabled, but manifest embedding is disabled.
- # In that case the basic rule should be used (e.g. 'link').
- # See also implementation of _GetWinLinkRuleNameSuffix().
- assert embed_manifest
- # Make .rc file out of manifest, compile it to .res file and re-link.
- manifest_commands += [
- ('%(python)s gyp-win-tool manifest-to-rc $arch %(out)s.manifest'
- ' %(out)s.manifest.rc %(resname)s'),
- '%(python)s gyp-win-tool rc-wrapper $arch $rc %(out)s.manifest.rc',
- '%(ldcmd)s %(out)s.manifest.res']
- cmd = 'cmd /c %s && if not "$manifests"=="" (%s)' % (
- ' && '.join(['(%s)' % c for c in link_commands]),
- ' && '.join(['(%s)' % c for c in manifest_commands]))
resource_name = {
'exe': '1',
'dll': '2',
}[binary_type]
- return cmd % {'python': sys.executable,
- 'out': out,
- 'ldcmd': ldcmd,
- 'resname': resource_name}
-
- rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental)
+ return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
+ '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
+ '$manifests' % {
+ 'python': sys.executable,
+ 'out': out,
+ 'ldcmd': ldcmd,
+ 'resname': resource_name,
+ 'embed': embed_manifest }
+ rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
dlldesc = 'LINK%s(DLL) $dll' % rule_name_suffix.upper()
dllcmd = ('%s gyp-win-tool link-wrapper $arch '
'$ld /nologo $implibflag /DLL /OUT:$dll '
@@ -1915,12 +1882,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
sys.executable),
rspfile='$out.rsp',
rspfile_content='$in_newline $libflags')
- _AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=True)
- _AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=False)
- _AddWinLinkRules(master_ninja, embed_manifest=False, link_incremental=False)
- # Do not generate rules for embed_manifest=False and link_incremental=True
- # because in that case rules for (False, False) should be used (see
- # implementation of _GetWinLinkRuleNameSuffix()).
+ _AddWinLinkRules(master_ninja, embed_manifest=True)
+ _AddWinLinkRules(master_ninja, embed_manifest=False)
else:
master_ninja.rule(
'objc',
diff --git a/pylib/gyp/input.py b/pylib/gyp/input.py
index 9bc449d4..6472912d 100644
--- a/pylib/gyp/input.py
+++ b/pylib/gyp/input.py
@@ -822,8 +822,7 @@ def ExpandVariables(input, phase, variables, build_file):
rel_build_file_dir = build_file_dir
qualified_out_dir = generator_filelist_paths['qualified_out_dir']
path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
- if not os.path.isdir(os.path.dirname(path)):
- os.makedirs(os.path.dirname(path))
+ gyp.common.EnsureDirExists(path)
replacement = gyp.common.RelativePath(path, build_file_dir)
f = gyp.common.WriteOnDiff(path)
diff --git a/pylib/gyp/msvs_emulation.py b/pylib/gyp/msvs_emulation.py
index 723201eb..92ea86b7 100644
--- a/pylib/gyp/msvs_emulation.py
+++ b/pylib/gyp/msvs_emulation.py
@@ -454,7 +454,7 @@ class MsvsSettings(object):
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
- manifest_base_name, is_executable):
+ manifest_base_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
@@ -531,17 +531,21 @@ class MsvsSettings(object):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
- manifest_flags, manifest_files = self._GetLdManifestFlags(
- config, manifest_base_name, gyp_to_build_path,
- is_executable and not have_def_file)
+ manifest_flags, intermediate_manifest, manifest_files = \
+ self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
+ is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags)
- return ldflags, manifest_files
+ return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
- allow_isolation):
- """Returns the set of flags that need to be added to the link to generate
- a default manifest, as well as the list of all the manifest files to be
- merged by the manifest tool."""
+ allow_isolation, build_dir):
+ """Returns a 3-tuple:
+ - the set of flags that need to be added to the link to generate
+ a default manifest
+ - the intermediate manifest that the linker will generate that should be
+ used to assert it doesn't add anything to the merged one.
+ - the list of all the manifest files to be merged by the manifest tool and
+ included into the link."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config,
default='true')
@@ -549,7 +553,7 @@ class MsvsSettings(object):
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
- return ['/MANIFEST:NO'], []
+ return ['/MANIFEST:NO'], [], []
output_name = name + '.intermediate.manifest'
flags = [
@@ -557,9 +561,25 @@ class MsvsSettings(object):
'/ManifestFile:' + output_name,
]
+ # Instead of using the MANIFESTUAC flags, we generate a .manifest to
+ # include into the list of manifests. This allows us to avoid the need to
+ # do two passes during linking. The /MANIFEST flag and /ManifestFile are
+ # still used, and the intermediate manifest is used to assert that the
+ # final manifest we get from merging all the additional manifest files
+ # (plus the one we generate here) isn't modified by merging the
+ # intermediate into it.
+
+ # Always NO, because we generate a manifest file that has what we want.
+ flags.append('/MANIFESTUAC:NO')
+
config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true')
+ manifest_files = []
+ generated_manifest_outer = \
+"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
+"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
+"</assembly>"
if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0')
@@ -571,18 +591,38 @@ class MsvsSettings(object):
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false')
- flags.append('''/MANIFESTUAC:"level='%s' uiAccess='%s'"''' %
- (execution_level_map[execution_level], ui_access))
+
+ inner = '''
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level='%s' uiAccess='%s' />
+ </requestedPrivileges>
+ </security>
+</trustInfo>''' % (execution_level_map[execution_level], ui_access)
else:
- flags.append('/MANIFESTUAC:NO')
+ inner = ''
+
+ generated_manifest_contents = generated_manifest_outer % inner
+ generated_name = name + '.generated.manifest'
+ # Need to join with the build_dir here as we're writing it during
+ # generation time, but we return the un-joined version because the build
+ # will occur in that directory. We only write the file if the contents
+ # have changed so that simply regenerating the project files doesn't
+ # cause a relink.
+ build_dir_generated_name = os.path.join(build_dir, generated_name)
+ gyp.common.EnsureDirExists(build_dir_generated_name)
+ f = gyp.common.WriteOnDiff(build_dir_generated_name)
+ f.write(generated_manifest_contents)
+ f.close()
+ manifest_files = [generated_name]
if allow_isolation:
flags.append('/ALLOWISOLATION')
- manifest_files = [output_name]
manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path)
- return flags, manifest_files
+ return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
@@ -605,7 +645,8 @@ class MsvsSettings(object):
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
- embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config)
+ embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
+ default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
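
The hunk above stops passing /MANIFESTUAC to the linker and instead writes a small .generated.manifest that is merged with any additional manifests in a single link pass. A sketch (not part of the patch) of how the outer/inner templates combine for the defaults exercised by the tests (UACExecutionLevel 0, i.e. level 'asInvoker', uiAccess 'false'):

    outer = (
        "<?xml version='1.0' encoding='UTF-8' standalone='yes'?>"
        "<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s"
        "</assembly>")
    inner = '''
    <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
      <security>
        <requestedPrivileges>
          <requestedExecutionLevel level='asInvoker' uiAccess='false' />
        </requestedPrivileges>
      </security>
    </trustInfo>'''
    # Contents written (via WriteOnDiff, so unchanged content does not relink)
    # to <name>.generated.manifest in the build directory.
    print(outer % inner)
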
diff --git a/pylib/gyp/ordered_dict.py b/pylib/gyp/ordered_dict.py
new file mode 100644
index 00000000..083b9f1a
--- /dev/null
+++ b/pylib/gyp/ordered_dict.py
@@ -0,0 +1,266 @@
+# Unmodified from http://code.activestate.com/recipes/576693/
+# Linked from Python documentation here:
+# http://docs.python.org/2/library/collections.html#collections.OrderedDict
+#
+# This should be deleted once Py2.7 is available on all bots, see
+# http://crbug.com/241769.
+
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python2.7's test suite and incorporates all the latest updates.
+
+try:
+ from thread import get_ident as _get_ident
+except ImportError:
+ from dummy_thread import get_ident as _get_ident
+
+try:
+ from _abcoll import KeysView, ValuesView, ItemsView
+except ImportError:
+ pass
+
+
+class OrderedDict(dict):
+ 'Dictionary that remembers insertion order'
+ # An inherited dict maps keys to values.
+ # The inherited dict provides __getitem__, __len__, __contains__, and get.
+ # The remaining methods are order-aware.
+ # Big-O running times for all methods are the same as for regular dictionaries.
+
+ # The internal self.__map dictionary maps keys to links in a doubly linked list.
+ # The circular doubly linked list starts and ends with a sentinel element.
+ # The sentinel element never gets deleted (this simplifies the algorithm).
+ # Each link is stored as a list of length three: [PREV, NEXT, KEY].
+
+ def __init__(self, *args, **kwds):
+ '''Initialize an ordered dictionary. Signature is the same as for
+ regular dictionaries, but keyword arguments are not recommended
+ because their insertion order is arbitrary.
+
+ '''
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__root
+ except AttributeError:
+ self.__root = root = [] # sentinel node
+ root[:] = [root, root, None]
+ self.__map = {}
+ self.__update(*args, **kwds)
+
+ def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+ 'od.__setitem__(i, y) <==> od[i]=y'
+ # Setting a new item creates a new link which goes at the end of the linked
+ # list, and the inherited dictionary is updated with the new key/value pair.
+ if key not in self:
+ root = self.__root
+ last = root[0]
+ last[1] = root[0] = self.__map[key] = [last, root, key]
+ dict_setitem(self, key, value)
+
+ def __delitem__(self, key, dict_delitem=dict.__delitem__):
+ 'od.__delitem__(y) <==> del od[y]'
+ # Deleting an existing item uses self.__map to find the link which is
+ # then removed by updating the links in the predecessor and successor nodes.
+ dict_delitem(self, key)
+ link_prev, link_next, key = self.__map.pop(key)
+ link_prev[1] = link_next
+ link_next[0] = link_prev
+
+ def __iter__(self):
+ 'od.__iter__() <==> iter(od)'
+ root = self.__root
+ curr = root[1]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[1]
+
+ def __reversed__(self):
+ 'od.__reversed__() <==> reversed(od)'
+ root = self.__root
+ curr = root[0]
+ while curr is not root:
+ yield curr[2]
+ curr = curr[0]
+
+ def clear(self):
+ 'od.clear() -> None. Remove all items from od.'
+ try:
+ for node in self.__map.itervalues():
+ del node[:]
+ root = self.__root
+ root[:] = [root, root, None]
+ self.__map.clear()
+ except AttributeError:
+ pass
+ dict.clear(self)
+
+ def popitem(self, last=True):
+ '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+ Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+ '''
+ if not self:
+ raise KeyError('dictionary is empty')
+ root = self.__root
+ if last:
+ link = root[0]
+ link_prev = link[0]
+ link_prev[1] = root
+ root[0] = link_prev
+ else:
+ link = root[1]
+ link_next = link[1]
+ root[1] = link_next
+ link_next[0] = root
+ key = link[2]
+ del self.__map[key]
+ value = dict.pop(self, key)
+ return key, value
+
+ # -- the following methods do not depend on the internal structure --
+
+ def keys(self):
+ 'od.keys() -> list of keys in od'
+ return list(self)
+
+ def values(self):
+ 'od.values() -> list of values in od'
+ return [self[key] for key in self]
+
+ def items(self):
+ 'od.items() -> list of (key, value) pairs in od'
+ return [(key, self[key]) for key in self]
+
+ def iterkeys(self):
+ 'od.iterkeys() -> an iterator over the keys in od'
+ return iter(self)
+
+ def itervalues(self):
+ 'od.itervalues -> an iterator over the values in od'
+ for k in self:
+ yield self[k]
+
+ def iteritems(self):
+ 'od.iteritems -> an iterator over the (key, value) items in od'
+ for k in self:
+ yield (k, self[k])
+
+ def update(*args, **kwds):
+ '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
+
+ If E is a dict instance, does: for k in E: od[k] = E[k]
+ If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
+ Or if E is an iterable of items, does: for k, v in E: od[k] = v
+ In either case, this is followed by: for k, v in F.items(): od[k] = v
+
+ '''
+ if len(args) > 2:
+ raise TypeError('update() takes at most 2 positional '
+ 'arguments (%d given)' % (len(args),))
+ elif not args:
+ raise TypeError('update() takes at least 1 argument (0 given)')
+ self = args[0]
+ # Make progressively weaker assumptions about "other"
+ other = ()
+ if len(args) == 2:
+ other = args[1]
+ if isinstance(other, dict):
+ for key in other:
+ self[key] = other[key]
+ elif hasattr(other, 'keys'):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
+ for key, value in kwds.items():
+ self[key] = value
+
+ __update = update # let subclasses override update without breaking __init__
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+
+ '''
+ if key in self:
+ result = self[key]
+ del self[key]
+ return result
+ if default is self.__marker:
+ raise KeyError(key)
+ return default
+
+ def setdefault(self, key, default=None):
+ 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+ if key in self:
+ return self[key]
+ self[key] = default
+ return default
+
+ def __repr__(self, _repr_running={}):
+ 'od.__repr__() <==> repr(od)'
+ call_key = id(self), _get_ident()
+ if call_key in _repr_running:
+ return '...'
+ _repr_running[call_key] = 1
+ try:
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, self.items())
+ finally:
+ del _repr_running[call_key]
+
+ def __reduce__(self):
+ 'Return state information for pickling'
+ items = [[k, self[k]] for k in self]
+ inst_dict = vars(self).copy()
+ for k in vars(OrderedDict()):
+ inst_dict.pop(k, None)
+ if inst_dict:
+ return (self.__class__, (items,), inst_dict)
+ return self.__class__, (items,)
+
+ def copy(self):
+ 'od.copy() -> a shallow copy of od'
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+ and values equal to v (which defaults to None).
+
+ '''
+ d = cls()
+ for key in iterable:
+ d[key] = value
+ return d
+
+ def __eq__(self, other):
+ '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
+ while comparison to a regular mapping is order-insensitive.
+
+ '''
+ if isinstance(other, OrderedDict):
+ return len(self)==len(other) and self.items() == other.items()
+ return dict.__eq__(self, other)
+
+ def __ne__(self, other):
+ return not self == other
+
+ # -- the following methods are only used in Python 2.7 --
+
+ def viewkeys(self):
+ "od.viewkeys() -> a set-like object providing a view on od's keys"
+ return KeysView(self)
+
+ def viewvalues(self):
+ "od.viewvalues() -> an object providing a view on od's values"
+ return ValuesView(self)
+
+ def viewitems(self):
+ "od.viewitems() -> a set-like object providing a view on od's items"
+ return ItemsView(self)
+
diff --git a/pylib/gyp/win_tool.py b/pylib/gyp/win_tool.py
index 7f3b0a54..1634ff93 100755
--- a/pylib/gyp/win_tool.py
+++ b/pylib/gyp/win_tool.py
@@ -13,6 +13,7 @@ import os
import re
import shutil
import subprocess
+import string
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
@@ -116,6 +117,82 @@ class WinTool(object):
print line
return link.returncode
+ def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
+ mt, rc, intermediate_manifest, *manifests):
+ """A wrapper for handling creating a manifest resource and then executing
+ a link command."""
+ # The 'normal' way to do manifests is to have link generate a manifest
+ # based on gathering dependencies from the object files, then merge that
+ # manifest with other manifests supplied as sources, convert the merged
+ # manifest to a resource, and then *relink*, including the compiled
+ # version of the manifest resource. This breaks incremental linking, and
+ # is generally overly complicated. Instead, we merge all the manifests
+ # provided (along with one that includes what would normally be in the
+ # linker-generated one, see msvs_emulation.py), and include that into the
+ # first and only link. We still tell link to generate a manifest, but we
+ # only use that to assert that our simpler process did not miss anything.
+ variables = {
+ 'python': sys.executable,
+ 'arch': arch,
+ 'out': out,
+ 'ldcmd': ldcmd,
+ 'resname': resname,
+ 'mt': mt,
+ 'rc': rc,
+ 'intermediate_manifest': intermediate_manifest,
+ 'manifests': ' '.join(manifests),
+ }
+ add_to_ld = ''
+ if manifests:
+ subprocess.check_call(
+ '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
+ '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
+ if embed_manifest == 'True':
+ subprocess.check_call(
+ '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
+ ' %(out)s.manifest.rc %(resname)s' % variables)
+ subprocess.check_call(
+ '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
+ '%(out)s.manifest.rc' % variables)
+ add_to_ld = ' %(out)s.manifest.res' % variables
+ subprocess.check_call(ldcmd + add_to_ld)
+
+ # Run mt.exe on the theoretically complete manifest we generated, merging
+ # it with the one the linker generated to confirm that the linker
+ # generated one does not add anything. This is strictly unnecessary for
+ # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
+ # used in a #pragma comment.
+ if manifests:
+ # Merge the intermediate one with ours to .assert.manifest, then check
+ # that .assert.manifest is identical to ours.
+ subprocess.check_call(
+ '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
+ '-manifest %(out)s.manifest %(intermediate_manifest)s '
+ '-out:%(out)s.assert.manifest' % variables)
+ assert_manifest = '%(out)s.assert.manifest' % variables
+ our_manifest = '%(out)s.manifest' % variables
+ # Load and normalize the manifests. mt.exe sometimes removes whitespace,
+ # and sometimes doesn't unfortunately.
+ with open(our_manifest, 'rb') as our_f:
+ with open(assert_manifest, 'rb') as assert_f:
+ our_data = our_f.read().translate(None, string.whitespace)
+ assert_data = assert_f.read().translate(None, string.whitespace)
+ if our_data != assert_data:
+ os.unlink(out)
+ def dump(filename):
+ sys.stderr.write('%s\n-----\n' % filename)
+ with open(filename, 'rb') as f:
+ sys.stderr.write(f.read() + '\n-----\n')
+ dump(intermediate_manifest)
+ dump(our_manifest)
+ dump(assert_manifest)
+ sys.stderr.write(
+ 'Linker generated manifest "%s" added to final manifest "%s" '
+ '(result in "%s"). '
+ 'Were /MANIFEST switches used in #pragma statements? ' % (
+ intermediate_manifest, our_manifest, assert_manifest))
+ return 1
+
def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning
(some XML blocks are recognized by the OS loader, but not the manifest
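
The assertion step above compares the merged manifest against the linker-generated intermediate one after stripping whitespace, since mt.exe is inconsistent about preserving it. The core comparison in isolation (a sketch, not part of the patch; relies on Python 2 str.translate semantics):

    import string

    def manifests_match(path_a, path_b):
      # Read both files and delete all whitespace before comparing
      # (Python 2: str.translate(None, deletechars) removes those characters).
      with open(path_a, 'rb') as f_a:
        with open(path_b, 'rb') as f_b:
          data_a = f_a.read().translate(None, string.whitespace)
          data_b = f_b.read().translate(None, string.whitespace)
      return data_a == data_b
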
diff --git a/pylib/gyp/xcode_emulation.py b/pylib/gyp/xcode_emulation.py
index 5e50f10d..520dcc4d 100644
--- a/pylib/gyp/xcode_emulation.py
+++ b/pylib/gyp/xcode_emulation.py
@@ -24,6 +24,7 @@ class XcodeSettings(object):
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
+ _sdk_root_cache = {}
# Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
# cached at class-level for efficiency.
@@ -290,9 +291,14 @@ class XcodeSettings(object):
sdk_root = self._SdkRoot(configname)
if sdk_root.startswith('/'):
return sdk_root
+ return self._XcodeSdkPath(sdk_root)
+
+ def _XcodeSdkPath(self, sdk_root):
if sdk_root not in XcodeSettings._sdk_path_cache:
- XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
- sdk_root, 'Path')
+ sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
+ XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
+ if sdk_root:
+ XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
return XcodeSettings._sdk_path_cache[sdk_root]
def _AppendPlatformVersionMinFlags(self, lst):
@@ -885,6 +891,8 @@ class XcodeSettings(object):
cache['DTXcodeBuild'] = xcode_build
sdk_root = self._SdkRoot(configname)
+ if not sdk_root:
+ sdk_root = self._DefaultSdkRoot()
cache['DTSDKName'] = sdk_root
if xcode >= '0430':
cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
@@ -909,6 +917,29 @@ class XcodeSettings(object):
items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
return items
+ def _DefaultSdkRoot(self):
+ """Returns the default SDKROOT to use.
+
+ Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
+ project, then the environment variable was empty. Starting with this
+ version, Xcode uses the name of the newest SDK installed.
+ """
+ if self._XcodeVersion() < '0500':
+ return ''
+ default_sdk_path = self._XcodeSdkPath('')
+ default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
+ if default_sdk_root:
+ return default_sdk_root
+ all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
+ for line in all_sdks.splitlines():
+ items = line.split()
+ if len(items) >= 3 and items[-2] == '-sdk':
+ sdk_root = items[-1]
+ sdk_path = self._XcodeSdkPath(sdk_root)
+ if sdk_path == default_sdk_path:
+ return sdk_root
+ return ''
+
def _DefaultArch(self):
# For Mac projects, Xcode changed the default value used when ARCHS is not
# set from "i386" to "x86_64".
diff --git a/test/mac/gyptest-app.py b/test/mac/gyptest-app.py
index c84c92fd..b50e1a84 100755
--- a/test/mac/gyptest-app.py
+++ b/test/mac/gyptest-app.py
@@ -32,6 +32,11 @@ def ls(path):
result.append(os.path.join(dirpath, f)[len(path) + 1:])
return result
+def XcodeVersion():
+ stdout = subprocess.check_output(['xcodebuild', '-version'])
+ version = stdout.splitlines()[0].split()[-1].replace('.', '')
+ return (version + '0' * (3 - len(version))).zfill(4)
+
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
@@ -56,7 +61,15 @@ if sys.platform == 'darwin':
plist = plistlib.readPlist(info_plist)
ExpectEq(GetStdout(['sw_vers', '-buildVersion']),
plist['BuildMachineOSBuild'])
- ExpectEq('', plist['DTSDKName'])
+
+ # Prior to Xcode 5.0.0, SDKROOT (and thus DTSDKName) was only defined if
+ # set in the Xcode project file. Starting with that version, it is always
+ # defined.
+ expected = ''
+ if XcodeVersion() >= '0500':
+ version = GetStdout(['xcodebuild', '-version', '-sdk', '', 'SDKVersion'])
+ expected = 'macosx' + version
+ ExpectEq(expected, plist['DTSDKName'])
sdkbuild = GetStdout(
['xcodebuild', '-version', '-sdk', '', 'ProductBuildVersion'])
if not sdkbuild:
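
The XcodeVersion() helper added above (and duplicated in gyptest-xcode-env-order.py below) normalizes the `xcodebuild -version` string into a zero-padded four-character form so plain string comparisons such as >= '0500' behave sensibly. A worked example of the padding (a sketch, not part of the patch):

    def normalize(version):          # e.g. '5.0.2' reported by xcodebuild
      version = version.replace('.', '')                 # '502'
      return (version + '0' * (3 - len(version))).zfill(4)

    assert normalize('5.0.2') == '0502'
    assert normalize('5.0') == '0500'
    assert normalize('4.6') == '0460'
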
diff --git a/test/mac/gyptest-sdkroot.py b/test/mac/gyptest-sdkroot.py
index 20edd365..711726ed 100644
--- a/test/mac/gyptest-sdkroot.py
+++ b/test/mac/gyptest-sdkroot.py
@@ -17,22 +17,29 @@ import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+ def GetSDKPath(sdk):
+ """Return SDKROOT if the SDK version |sdk| is installed or empty string."""
+ DEVNULL = open(os.devnull, 'wb')
+ try:
+ proc = subprocess.Popen(
+ ['xcodebuild', '-version', '-sdk', 'macosx' + sdk, 'Path'],
+ stdout=subprocess.PIPE, stderr=DEVNULL)
+ return proc.communicate()[0].rstrip('\n')
+ finally:
+ DEVNULL.close()
+
+ def SelectSDK():
+ """Select the oldest SDK installed (greater than 10.6)."""
+ for sdk in ['10.6', '10.7', '10.8', '10.9']:
+ path = GetSDKPath(sdk)
+ if path:
+ return True, sdk, path
+ return False, '', ''
+
# Make sure this works on the bots, which only have the 10.6 sdk, and on
- # dev machines, which usually don't have the 10.6 sdk.
- sdk = '10.6'
- DEVNULL = open(os.devnull, 'wb')
- proc = subprocess.Popen(['xcodebuild', '-version', '-sdk', 'macosx' + sdk],
- stdout=DEVNULL, stderr=DEVNULL)
- proc.communicate()
- DEVNULL.close()
- if proc.returncode:
- sdk = '10.7'
-
- proc = subprocess.Popen(['xcodebuild', '-version',
- '-sdk', 'macosx' + sdk, 'Path'],
- stdout=subprocess.PIPE)
- sdk_path = proc.communicate()[0].rstrip('\n')
- if proc.returncode != 0:
+ # dev machines which usually don't have the 10.6 sdk.
+ sdk_found, sdk, sdk_path = SelectSDK()
+ if not sdk_found:
test.fail_test()
test.write('sdkroot/test.gyp', test.read('sdkroot/test.gyp') % sdk)
diff --git a/test/mac/gyptest-xcode-env-order.py b/test/mac/gyptest-xcode-env-order.py
index 58b146c1..decf2b3c 100755
--- a/test/mac/gyptest-xcode-env-order.py
+++ b/test/mac/gyptest-xcode-env-order.py
@@ -10,8 +10,14 @@ Verifies that dependent Xcode settings are processed correctly.
import TestGyp
+import subprocess
import sys
+def XcodeVersion():
+ stdout = subprocess.check_output(['xcodebuild', '-version'])
+ version = stdout.splitlines()[0].split()[-1].replace('.', '')
+ return (version + '0' * (3 - len(version))).zfill(4)
+
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
@@ -71,10 +77,15 @@ if sys.platform == 'darwin':
# if it's not right at the start of the string (e.g. ':$PRODUCT_TYPE'), so
# this looks like an Xcode bug. This bug isn't emulated (yet?), so check this
# only for Xcode.
- if test.format == 'xcode':
+ if test.format == 'xcode' and XcodeVersion() < '0500':
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey3</key>
\t<string>$PRODUCT_TYPE:D:/Source/Project/Test</string>''')
+ else:
+ # The bug has been fixed by Xcode version 5.0.0.
+ test.must_contain(info_plist, '''\
+\t<key>BareProcessedKey3</key>
+\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>MixedProcessedKey</key>
diff --git a/test/mac/gyptest-xcode-gcc.py b/test/mac/gyptest-xcode-gcc.py
index abd88489..e45d0b5e 100644
--- a/test/mac/gyptest-xcode-gcc.py
+++ b/test/mac/gyptest-xcode-gcc.py
@@ -11,11 +11,23 @@ Verifies that xcode-style GCC_... settings are handled properly.
import TestGyp
import os
+import subprocess
import sys
def IgnoreOutput(string, expected_string):
return True
+def CompilerVersion(compiler):
+ stdout = subprocess.check_output([compiler, '-v'], stderr=subprocess.STDOUT)
+ return stdout.rstrip('\n')
+
+def CompilerSupportsWarnAboutInvalidOffsetOfMacro(test):
+ # "clang" does not support the "-Winvalid-offsetof" flag, and silently
+  # ignores it. Starting with Xcode 5.0.0, "gcc" is just a "clang" binary with
+ # some hard-coded include path hack, so use the output of "-v" to detect if
+ # the compiler supports the flag or not.
+ return 'clang' not in CompilerVersion('/usr/bin/cc')
+
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
@@ -30,11 +42,7 @@ if sys.platform == 'darwin':
# clang doesn't warn on invalid offsetofs, it silently ignores
# -Wno-invalid-offsetof.
- # TODO(thakis): This isn't really the right way to detect the compiler,
- # Xcode has some embedded compiler, but it's a reliable proxy at least on
- # the bots. The compiler is forced to gcc/g++ in the gyp file in a
- # make_global_settings section for ninja and make.
- if test.format != 'xcode' or os.readlink('/usr/bin/cc') != 'clang':
+ if CompilerSupportsWarnAboutInvalidOffsetOfMacro(test):
targets.append('warn_about_invalid_offsetof_macro')
for target in targets:
diff --git a/test/mac/sdkroot/test_shorthand.sh b/test/mac/sdkroot/test_shorthand.sh
index d768fba0..ac4ac229 100755
--- a/test/mac/sdkroot/test_shorthand.sh
+++ b/test/mac/sdkroot/test_shorthand.sh
@@ -5,8 +5,16 @@
set -e
-if ! expected=$(xcodebuild -version -sdk macosx10.6 Path 2>/dev/null) ; then
- expected=$(xcodebuild -version -sdk macosx10.7 Path)
+found=false
+for sdk in 10.6 10.7 10.8 10.9 ; do
+ if expected=$(xcodebuild -version -sdk macosx$sdk Path 2>/dev/null) ; then
+ found=true
+ break
+ fi
+done
+if ! $found ; then
+ echo >&2 "cannot find installed SDK"
+ exit 1
fi
test $SDKROOT = $expected
diff --git a/test/mac/xcode-gcc/test.gyp b/test/mac/xcode-gcc/test.gyp
index c0fcb504..1ca8b215 100644
--- a/test/mac/xcode-gcc/test.gyp
+++ b/test/mac/xcode-gcc/test.gyp
@@ -2,11 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
- 'make_global_settings': [
- ['CC', '/usr/bin/gcc'],
- ['CXX', '/usr/bin/g++'],
- ],
-
'target_defaults': {
'xcode_settings': {
'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',
diff --git a/test/win/gyptest-link-generate-manifest.py b/test/win/gyptest-link-generate-manifest.py
index ff03afd3..77c9228e 100644
--- a/test/win/gyptest-link-generate-manifest.py
+++ b/test/win/gyptest-link-generate-manifest.py
@@ -52,6 +52,10 @@ if sys.platform == 'win32':
test.run_gyp('generate-manifest.gyp', chdir=CHDIR)
test.build('generate-manifest.gyp', test.ALL, chdir=CHDIR)
+ # Make sure that generation of .generated.manifest does not cause a relink.
+ test.run_gyp('generate-manifest.gyp', chdir=CHDIR)
+ test.up_to_date('generate-manifest.gyp', test.ALL, chdir=CHDIR)
+
def test_manifest(filename, generate_manifest, embedded_manifest,
extra_manifest):
exe_file = test.built_file_path(filename, chdir=CHDIR)
@@ -116,4 +120,8 @@ if sys.platform == 'win32':
generate_manifest=False,
embedded_manifest=False,
extra_manifest=True)
+ test_manifest('test_generate_manifest_default_embed_default.exe',
+ generate_manifest=True,
+ embedded_manifest=True,
+ extra_manifest=False)
test.pass_test()
diff --git a/test/win/gyptest-link-ordering.py b/test/win/gyptest-link-ordering.py
new file mode 100644
index 00000000..4928583a
--- /dev/null
+++ b/test/win/gyptest-link-ordering.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure the link order of object files is the same between msvs and ninja.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('link-ordering.gyp', chdir=CHDIR)
+ test.build('link-ordering.gyp', test.ALL, chdir=CHDIR)
+
+ def GetDisasm(exe):
+ full_path = test.built_file_path(exe, chdir=CHDIR)
+ # Get disassembly and drop int3 padding between functions.
+ return '\n'.join(
+ x for x in test.run_dumpbin('/disasm', full_path).splitlines()
+ if 'CC' not in x)
+
+ # This is the full dump that we expect. The source files in the .gyp match
+ # this order which is what determines the ordering in the binary.
+
+ expected_disasm_basic = '''
+_mainCRTStartup:
+ 00401000: B8 05 00 00 00 mov eax,5
+ 00401005: C3 ret
+?z@@YAHXZ:
+ 00401010: B8 03 00 00 00 mov eax,3
+ 00401015: C3 ret
+?x@@YAHXZ:
+ 00401020: B8 01 00 00 00 mov eax,1
+ 00401025: C3 ret
+?y@@YAHXZ:
+ 00401030: B8 02 00 00 00 mov eax,2
+ 00401035: C3 ret
+_main:
+ 00401040: 33 C0 xor eax,eax
+ 00401042: C3 ret
+'''
+
+ if expected_disasm_basic not in GetDisasm('test_ordering_exe.exe'):
+ print GetDisasm('test_ordering_exe.exe')
+ test.fail_test()
+
+ # Similar to above. The VS generator handles subdirectories differently.
+
+ expected_disasm_subdirs = '''
+_mainCRTStartup:
+ 00401000: B8 05 00 00 00 mov eax,5
+ 00401005: C3 ret
+_main:
+ 00401010: 33 C0 xor eax,eax
+ 00401012: C3 ret
+?y@@YAHXZ:
+ 00401020: B8 02 00 00 00 mov eax,2
+ 00401025: C3 ret
+?z@@YAHXZ:
+ 00401030: B8 03 00 00 00 mov eax,3
+ 00401035: C3 ret
+'''
+
+ if expected_disasm_subdirs not in GetDisasm('test_ordering_subdirs.exe'):
+ print GetDisasm('test_ordering_subdirs.exe')
+ test.fail_test()
+
+ test.pass_test()
diff --git a/test/win/gyptest-link-unsupported-manifest.py b/test/win/gyptest-link-unsupported-manifest.py
new file mode 100644
index 00000000..8f7e12bc
--- /dev/null
+++ b/test/win/gyptest-link-unsupported-manifest.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure we error out if #pragma comments are used to modify manifests.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+ # This assertion only applies to the ninja build.
+ test = TestGyp.TestGyp(formats=['ninja'])
+
+ CHDIR = 'linker-flags'
+ test.run_gyp('unsupported-manifest.gyp', chdir=CHDIR)
+
+ # Just needs to fail to build.
+ test.build('unsupported-manifest.gyp',
+ 'test_unsupported', chdir=CHDIR, status=1)
+ test.must_not_exist(test.built_file_path('test_unsupported.exe', chdir=CHDIR))
+
+ test.pass_test()
diff --git a/test/win/gyptest-link-update-manifest.py b/test/win/gyptest-link-update-manifest.py
new file mode 100644
index 00000000..4f8b2b91
--- /dev/null
+++ b/test/win/gyptest-link-update-manifest.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure binary is relinked when manifest settings are changed.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+ import pywintypes
+ import win32api
+ import winerror
+
+ RT_MANIFEST = 24
+
+ class LoadLibrary(object):
+ """Context manager for loading and releasing binaries in Windows.
+ Yields the handle of the binary loaded."""
+ def __init__(self, path):
+ self._path = path
+ self._handle = None
+
+ def __enter__(self):
+ self._handle = win32api.LoadLibrary(self._path)
+ return self._handle
+
+ def __exit__(self, type, value, traceback):
+ win32api.FreeLibrary(self._handle)
+
+ def extract_manifest(path, resource_name):
+ """Reads manifest from |path| and returns it as a string.
+    Returns None if there is no such manifest."""
+ with LoadLibrary(path) as handle:
+ try:
+ return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
+ except pywintypes.error as error:
+ if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
+ return None
+ else:
+ raise
+
+ test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+ CHDIR = 'linker-flags'
+
+ gyp_template = '''
+{
+ 'targets': [
+ {
+ 'target_name': 'test_update_manifest',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ 'UACExecutionLevel': '%(uac_execution_level)d',
+ },
+ 'VCManifestTool': {
+ 'EmbedManifest': 'true',
+ 'AdditionalManifestFiles': '%(additional_manifest_files)s',
+ },
+ },
+ },
+ ],
+}
+'''
+
+ gypfile = 'update-manifest.gyp'
+
+ def WriteAndUpdate(uac_execution_level, additional_manifest_files, do_build):
+ with open(os.path.join(CHDIR, gypfile), 'wb') as f:
+ f.write(gyp_template % {
+ 'uac_execution_level': uac_execution_level,
+ 'additional_manifest_files': additional_manifest_files,
+ })
+ test.run_gyp(gypfile, chdir=CHDIR)
+ if do_build:
+ test.build(gypfile, chdir=CHDIR)
+ exe_file = test.built_file_path('test_update_manifest.exe', chdir=CHDIR)
+ return extract_manifest(exe_file, 1)
+
+ manifest = WriteAndUpdate(0, '', True)
+ test.fail_test('asInvoker' not in manifest)
+ test.fail_test('35138b9a-5d96-4fbd-8e2d-a2440225f93a' in manifest)
+
+ # Make sure that updating .gyp and regenerating doesn't cause a rebuild.
+ WriteAndUpdate(0, '', False)
+ test.up_to_date(gypfile, test.ALL, chdir=CHDIR)
+
+ # But make sure that changing a manifest property does cause a relink.
+ manifest = WriteAndUpdate(2, '', True)
+ test.fail_test('requireAdministrator' not in manifest)
+
+ # Adding a manifest causes a rebuild.
+ manifest = WriteAndUpdate(2, 'extra.manifest', True)
+ test.fail_test('35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in manifest)
diff --git a/test/win/linker-flags/a/z.cc b/test/win/linker-flags/a/z.cc
new file mode 100644
index 00000000..8a435012
--- /dev/null
+++ b/test/win/linker-flags/a/z.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int z() {
+ return 3;
+}
diff --git a/test/win/linker-flags/b/y.cc b/test/win/linker-flags/b/y.cc
new file mode 100644
index 00000000..bd884119
--- /dev/null
+++ b/test/win/linker-flags/b/y.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int y() {
+ return 2;
+}
diff --git a/test/win/linker-flags/generate-manifest.gyp b/test/win/linker-flags/generate-manifest.gyp
index 41f888fa..34a68d1a 100644
--- a/test/win/linker-flags/generate-manifest.gyp
+++ b/test/win/linker-flags/generate-manifest.gyp
@@ -152,5 +152,15 @@
},
},
},
+ {
+ 'target_name': 'test_generate_manifest_default_embed_default',
+ 'type': 'executable',
+ 'sources': ['hello.cc'],
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'EnableUAC': 'true',
+ },
+ },
+ },
]
}
diff --git a/test/win/linker-flags/link-ordering.gyp b/test/win/linker-flags/link-ordering.gyp
new file mode 100644
index 00000000..36a7e54f
--- /dev/null
+++ b/test/win/linker-flags/link-ordering.gyp
@@ -0,0 +1,65 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_ordering_exe',
+ 'type': 'executable',
+ # These are so the names of the functions appear in the disassembly.
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ 'Optimization': '2',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '1',
+ 'GenerateManifest': 'false',
+ # Minimize the disassembly to just our code.
+ 'AdditionalOptions': [
+ '/NODEFAULTLIB',
+ ],
+ },
+ },
+ 'sources': [
+ # Explicitly sorted the same way as the disassembly in the test .py.
+ 'main-crt.c',
+ 'z.cc',
+ 'x.cc',
+ 'y.cc',
+ 'hello.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'test_ordering_subdirs',
+ 'type': 'executable',
+ # These are so the names of the functions appear in the disassembly.
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'DebugInformationFormat': '3',
+ 'Optimization': '2',
+ },
+ 'VCLinkerTool': {
+ 'GenerateDebugInformation': 'true',
+ 'LinkIncremental': '1',
+ 'GenerateManifest': 'false',
+ # Minimize the disassembly to just our code.
+ 'AdditionalOptions': [
+ '/NODEFAULTLIB',
+ ],
+ },
+ },
+ 'sources': [
+ # Explicitly sorted the same way as the disassembly in the test .py.
+ 'main-crt.c',
+ 'hello.cc',
+ 'b/y.cc',
+ 'a/z.cc',
+ ],
+ },
+
+ ]
+}
diff --git a/test/win/linker-flags/main-crt.c b/test/win/linker-flags/main-crt.c
new file mode 100644
index 00000000..bdc80c54
--- /dev/null
+++ b/test/win/linker-flags/main-crt.c
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Stub so we can link with /NODEFAULTLIB when checking disasm.
+int mainCRTStartup() {
+ return 5;
+}
diff --git a/test/win/linker-flags/manifest-in-comment.cc b/test/win/linker-flags/manifest-in-comment.cc
new file mode 100644
index 00000000..ae54ae54
--- /dev/null
+++ b/test/win/linker-flags/manifest-in-comment.cc
@@ -0,0 +1,13 @@
+// Copyright 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#pragma comment(linker, \
+ "\"/manifestdependency:type='Win32' " \
+ "name='Test.Research.SampleAssembly' version='6.0.0.0' " \
+ "processorArchitecture='X86' " \
+ "publicKeyToken='0000000000000000' language='*'\"")
+
+int main() {
+ return 0;
+}
diff --git a/test/win/linker-flags/unsupported-manifest.gyp b/test/win/linker-flags/unsupported-manifest.gyp
new file mode 100644
index 00000000..5549e7cb
--- /dev/null
+++ b/test/win/linker-flags/unsupported-manifest.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test_unsupported',
+ 'type': 'executable',
+ 'sources': ['manifest-in-comment.cc'],
+ },
+ ],
+}
diff --git a/test/win/linker-flags/x.cc b/test/win/linker-flags/x.cc
new file mode 100644
index 00000000..f5f763b0
--- /dev/null
+++ b/test/win/linker-flags/x.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int x() {
+ return 1;
+}
diff --git a/test/win/linker-flags/y.cc b/test/win/linker-flags/y.cc
new file mode 100644
index 00000000..bd884119
--- /dev/null
+++ b/test/win/linker-flags/y.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int y() {
+ return 2;
+}
diff --git a/test/win/linker-flags/z.cc b/test/win/linker-flags/z.cc
new file mode 100644
index 00000000..8a435012
--- /dev/null
+++ b/test/win/linker-flags/z.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int z() {
+ return 3;
+}