Diffstat (limited to 'grpc/tools/buildgen/extract_metadata_from_bazel_xml.py')
-rwxr-xr-x  grpc/tools/buildgen/extract_metadata_from_bazel_xml.py  448
1 file changed, 225 insertions(+), 223 deletions(-)
diff --git a/grpc/tools/buildgen/extract_metadata_from_bazel_xml.py b/grpc/tools/buildgen/extract_metadata_from_bazel_xml.py
index 63f8e2e5..55b8dd12 100755
--- a/grpc/tools/buildgen/extract_metadata_from_bazel_xml.py
+++ b/grpc/tools/buildgen/extract_metadata_from_bazel_xml.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -34,14 +34,18 @@ import subprocess
import yaml
import xml.etree.ElementTree as ET
import os
+import collections
import sys
+import re
+from typing import List, Any, Dict, Optional, Iterable
import build_cleaner
-_ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
-os.chdir(_ROOT)
+BuildMetadata = Dict[str, Any]
+BuildDict = Dict[str, BuildMetadata]
+BuildYaml = Dict[str, Any]
-def _bazel_query_xml_tree(query):
+def _bazel_query_xml_tree(query: str) -> ET.Element:
"""Get xml output of bazel query invocation, parsed as XML tree"""
output = subprocess.check_output(
['tools/bazel', 'query', '--noimplicit_deps', '--output', 'xml', query])
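For orientation, a minimal standalone sketch of the query step shown above (the query string 'deps("//:grpc")' is only an illustrative placeholder):

    import subprocess
    import xml.etree.ElementTree as ET

    # Run a bazel query and parse its XML output into an element tree.
    output = subprocess.check_output(
        ['tools/bazel', 'query', '--noimplicit_deps', '--output', 'xml',
         'deps("//:grpc")'])
    tree = ET.fromstring(output)  # <query> root element with <rule> children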
@@ -98,14 +102,14 @@ def _extract_rules_from_bazel_xml(xml_tree):
return result
-def _get_bazel_label(target_name):
+def _get_bazel_label(target_name: str) -> str:
if ':' in target_name:
return '//%s' % target_name
else:
return '//:%s' % target_name
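A quick illustration of the label helper above (toy inputs): targets in the repository root get the '//:' prefix, everything else just '//'.

    assert _get_bazel_label('gpr') == '//:gpr'
    assert (_get_bazel_label('src/compiler:grpc_cpp_plugin')
            == '//src/compiler:grpc_cpp_plugin')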
-def _extract_source_file_path(label):
+def _extract_source_file_path(label: str) -> str:
"""Gets relative path to source file from bazel deps listing"""
if label.startswith('//'):
label = label[len('//'):]
@@ -117,7 +121,7 @@ def _extract_source_file_path(label):
return label
-def _extract_public_headers(bazel_rule):
+def _extract_public_headers(bazel_rule: BuildMetadata) -> List[str]:
"""Gets list of public headers from a bazel rule"""
result = []
for dep in bazel_rule['hdrs']:
@@ -126,7 +130,7 @@ def _extract_public_headers(bazel_rule):
return list(sorted(result))
-def _extract_nonpublic_headers(bazel_rule):
+def _extract_nonpublic_headers(bazel_rule: BuildMetadata) -> List[str]:
"""Gets list of non-public headers from a bazel rule"""
result = []
for dep in bazel_rule['hdrs']:
@@ -136,7 +140,7 @@ def _extract_nonpublic_headers(bazel_rule):
return list(sorted(result))
-def _extract_sources(bazel_rule):
+def _extract_sources(bazel_rule: BuildMetadata) -> List[str]:
"""Gets list of source files from a bazel rule"""
result = []
for dep in bazel_rule['srcs']:
@@ -146,12 +150,14 @@ def _extract_sources(bazel_rule):
return list(sorted(result))
-def _extract_deps(bazel_rule):
+def _extract_deps(bazel_rule: BuildMetadata,
+ bazel_rules: BuildDict) -> List[str]:
"""Gets list of deps from from a bazel rule"""
return list(sorted(bazel_rule['deps']))
-def _create_target_from_bazel_rule(target_name, bazel_rules):
+def _create_target_from_bazel_rule(target_name: str,
+ bazel_rules: BuildDict) -> BuildMetadata:
"""Create build.yaml-like target definition from bazel metadata"""
bazel_rule = bazel_rules[_get_bazel_label(target_name)]
@@ -164,71 +170,16 @@ def _create_target_from_bazel_rule(target_name, bazel_rules):
'_PUBLIC_HEADERS_BAZEL': _extract_public_headers(bazel_rule),
'_HEADERS_BAZEL': _extract_nonpublic_headers(bazel_rule),
'_SRC_BAZEL': _extract_sources(bazel_rule),
- '_DEPS_BAZEL': _extract_deps(bazel_rule),
+ '_DEPS_BAZEL': _extract_deps(bazel_rule, bazel_rules),
+ 'public_headers': bazel_rule['_COLLAPSED_PUBLIC_HEADERS'],
+ 'headers': bazel_rule['_COLLAPSED_HEADERS'],
+ 'src': bazel_rule['_COLLAPSED_SRCS'],
+ 'deps': bazel_rule['_COLLAPSED_DEPS'],
}
return result
-def _sort_by_build_order(lib_names, lib_dict, deps_key_name, verbose=False):
- """Sort library names to form correct build order. Use metadata from lib_dict"""
- # we find correct build order by performing a topological sort
- # expected output: if library B depends on A, A should be listed first
-
- # all libs that are not in the dictionary are considered external.
- external_deps = list(
- sorted([lib_name for lib_name in lib_names if lib_name not in lib_dict
- ]))
- if verbose:
- print('topo_ordering ' + str(lib_names))
- print(' external_deps ' + str(external_deps))
-
- result = list(external_deps) # external deps will be listed first
- while len(result) < len(lib_names):
- more_results = []
- for lib in lib_names:
- if lib not in result:
- dep_set = set(lib_dict[lib].get(deps_key_name, []))
- dep_set = dep_set.intersection(lib_names)
- # if lib only depends on what's already built, add it to the results
- if not dep_set.difference(set(result)):
- more_results.append(lib)
- if not more_results:
- raise Exception(
- 'Cannot sort topologically, there seems to be a cyclic dependency'
- )
- if verbose:
- print(' adding ' + str(more_results))
- result = result + list(
- sorted(more_results
- )) # when build order doesn't matter, sort lexicographically
- return result
-
-
-# TODO(jtattermusch): deduplicate with transitive_dependencies.py (which has a slightly different logic)
-def _populate_transitive_deps(bazel_rules):
- """Add 'transitive_deps' field for each of the rules"""
- transitive_deps = {}
- for rule_name in bazel_rules.keys():
- transitive_deps[rule_name] = set(bazel_rules[rule_name]['deps'])
-
- while True:
- deps_added = 0
- for rule_name in bazel_rules.keys():
- old_deps = transitive_deps[rule_name]
- new_deps = set(old_deps)
- for dep_name in old_deps:
- new_deps.update(transitive_deps.get(dep_name, set()))
- deps_added += len(new_deps) - len(old_deps)
- transitive_deps[rule_name] = new_deps
- # if none of the transitive dep sets has changed, we're done
- if deps_added == 0:
- break
-
- for rule_name, bazel_rule in bazel_rules.items():
- bazel_rule['transitive_deps'] = list(sorted(transitive_deps[rule_name]))
-
-
-def _external_dep_name_from_bazel_dependency(bazel_dep):
+def _external_dep_name_from_bazel_dependency(bazel_dep: str) -> Optional[str]:
"""Returns name of dependency if external bazel dependency is provided or None"""
if bazel_dep.startswith('@com_google_absl//'):
# special case for adding a dependency on one of the absl libraries (there is not just one absl library)
@@ -238,6 +189,8 @@ def _external_dep_name_from_bazel_dependency(bazel_dep):
return 'upb'
elif bazel_dep == '//external:benchmark':
return 'benchmark'
+ elif bazel_dep == '//external:libssl':
+ return 'libssl'
else:
# all the other external deps such as protobuf, cares, zlib
# don't need to be listed explicitly, they are handled automatically
@@ -245,98 +198,186 @@ def _external_dep_name_from_bazel_dependency(bazel_dep):
return None
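A sketch of how this mapping behaves, using only the branches visible above (the protobuf case falls through to the final return None):

    assert _external_dep_name_from_bazel_dependency(
        '//external:benchmark') == 'benchmark'
    assert _external_dep_name_from_bazel_dependency(
        '//external:libssl') == 'libssl'
    # protobuf, cares, zlib etc. are handled automatically and map to None
    assert _external_dep_name_from_bazel_dependency(
        '//external:protobuf') is None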
-def _expand_intermediate_deps(target_dict, public_dep_names, bazel_rules):
- # Some of the libraries defined by bazel won't be exposed in build.yaml
- # We call these "intermediate" dependencies. This method expands
- # the intermediate deps for given target (populates library's
- # headers, sources and dicts as if the intermediate dependency never existed)
-
- # use this dictionary to translate from bazel labels to dep names
- bazel_label_to_dep_name = {}
- for dep_name in public_dep_names:
- bazel_label_to_dep_name[_get_bazel_label(dep_name)] = dep_name
+def _compute_transitive_metadata(
+ rule_name: str, bazel_rules: Any,
+ bazel_label_to_dep_name: Dict[str, str]) -> None:
+ """Computes the final build metadata for Bazel target with rule_name.
+
+ The dependencies that will appear on the deps list are:
+
+ * Public build targets including binaries and tests;
+ * External targets, like absl, re2.
+
+ All other intermediate dependencies will be merged, which means their
+ source files, headers, etc. will be collected into one build target. This
+ processing step greatly reduces the complexity of the generated build
+ specifications for other build systems, like CMake, Make, and setuptools.
+
+ The final build metadata are:
+ * _TRANSITIVE_DEPS: all the transitive dependencies including intermediate
+ targets;
+ * _COLLAPSED_DEPS: dependencies that fit the requirement above, with
+ duplicates removed, producing the shortest possible
+ dependency list in alphabetical order;
+ * _COLLAPSED_SRCS: the merged source files;
+ * _COLLAPSED_PUBLIC_HEADERS: the merged public headers;
+ * _COLLAPSED_HEADERS: the merged non-public headers;
+ * _EXCLUDE_DEPS: intermediate targets to exclude when performing collapsing
+ of sources and dependencies.
+
+ For collapsed_deps, the algorithm improves cases like the following:
+
+ The result in the past:
+ end2end_tests -> [grpc_test_util, grpc, gpr, address_sorting, upb]
+ grpc_test_util -> [grpc, gpr, address_sorting, upb, ...]
+ grpc -> [gpr, address_sorting, upb, ...]
+
+ The result of the algorithm:
+ end2end_tests -> [grpc_test_util]
+ grpc_test_util -> [grpc]
+ grpc -> [gpr, address_sorting, upb, ...]
+ """
+ bazel_rule = bazel_rules[rule_name]
+ direct_deps = _extract_deps(bazel_rule, bazel_rules)
+ transitive_deps = set()
+ collapsed_deps = set()
+ exclude_deps = set()
+ collapsed_srcs = set(_extract_sources(bazel_rule))
+ collapsed_public_headers = set(_extract_public_headers(bazel_rule))
+ collapsed_headers = set(_extract_nonpublic_headers(bazel_rule))
+
+ for dep in direct_deps:
+ external_dep_name_maybe = _external_dep_name_from_bazel_dependency(dep)
+
+ if dep in bazel_rules:
+ # Descend recursively, but no need to do that for external deps
+ if external_dep_name_maybe is None:
+ if "_PROCESSING_DONE" not in bazel_rules[dep]:
+ # This item has not been processed yet; compute it now
+ _compute_transitive_metadata(dep, bazel_rules,
+ bazel_label_to_dep_name)
+
+ transitive_deps.update(bazel_rules[dep].get(
+ '_TRANSITIVE_DEPS', []))
+ collapsed_deps.update(bazel_rules[dep].get('_COLLAPSED_DEPS', []))
+ exclude_deps.update(bazel_rules[dep].get('_EXCLUDE_DEPS', []))
+
+ # This dep is a public target, add it as a dependency
+ if dep in bazel_label_to_dep_name:
+ transitive_deps.update([bazel_label_to_dep_name[dep]])
+ collapsed_deps.update([bazel_label_to_dep_name[dep]])
+ # Add all the transitive deps of every public dep to the exclude
+ # list, since we want to avoid building sources that are already
+ # built by our dependencies
+ exclude_deps.update(bazel_rules[dep]['_TRANSITIVE_DEPS'])
+ continue
- target_name = target_dict['name']
- bazel_deps = target_dict['_DEPS_BAZEL']
+ # This dep is an external target, add it as a dependency
+ if external_dep_name_maybe is not None:
+ transitive_deps.update([external_dep_name_maybe])
+ collapsed_deps.update([external_dep_name_maybe])
+ continue
- # initial values
- public_headers = set(target_dict['_PUBLIC_HEADERS_BAZEL'])
- headers = set(target_dict['_HEADERS_BAZEL'])
- src = set(target_dict['_SRC_BAZEL'])
- deps = set()
+ # Direct dependencies are part of transitive dependencies
+ transitive_deps.update(direct_deps)
+
+ # Calculate transitive public deps (needed for collapsing sources)
+ transitive_public_deps = set(
+ filter(lambda x: x in bazel_label_to_dep_name, transitive_deps))
+
+ # Remove intermediate targets that our public dependencies already depend
+ # on. This is the step that further shortens the deps list.
+ collapsed_deps = set(filter(lambda x: x not in exclude_deps,
+ collapsed_deps))
+
+ # Compute the final source files and headers for this build target whose
+ # name is `rule_name` (input argument of this function).
+ #
+ # Imagine a public target PX with transitive deps [IA, IB, PY, IC, PZ]. PX,
+ # PY and PZ are public build targets. And IA, IB, IC are intermediate
+ # targets. In addition, PY depends on IC.
+ #
+ # Translate the condition into dependency graph:
+ # PX -> [IA, IB, PY, IC, PZ]
+ # PY -> [IC]
+ # Public targets: [PX, PY, PZ]
+ #
+ # The collapsed dependencies of PX: [PY, PZ].
+ # The excluded dependencies of PX: [PY, IC, PZ].
+ # (IC is excluded as a dependency of PX. It is already included in PY, hence
+ # it would be redundant to include it again.)
+ #
+ # Target PX should include source files and headers of [PX, IA, IB] as final
+ # build metadata.
+ for dep in transitive_deps:
+ if dep not in exclude_deps and dep not in transitive_public_deps:
+ if dep in bazel_rules:
+ collapsed_srcs.update(_extract_sources(bazel_rules[dep]))
+ collapsed_public_headers.update(
+ _extract_public_headers(bazel_rules[dep]))
+ collapsed_headers.update(
+ _extract_nonpublic_headers(bazel_rules[dep]))
+
+ # This item is a "visited" flag
+ bazel_rule['_PROCESSING_DONE'] = True
+ # The following items are described in the docstring.
+ bazel_rule['_TRANSITIVE_DEPS'] = list(sorted(transitive_deps))
+ bazel_rule['_COLLAPSED_DEPS'] = list(sorted(collapsed_deps))
+ bazel_rule['_COLLAPSED_SRCS'] = list(sorted(collapsed_srcs))
+ bazel_rule['_COLLAPSED_PUBLIC_HEADERS'] = list(
+ sorted(collapsed_public_headers))
+ bazel_rule['_COLLAPSED_HEADERS'] = list(sorted(collapsed_headers))
+ bazel_rule['_EXCLUDE_DEPS'] = list(sorted(exclude_deps))
- expansion_blacklist = set()
- to_expand = set(bazel_deps)
- while to_expand:
- # start with the last dependency to be built
- build_order = _sort_by_build_order(list(to_expand), bazel_rules,
- 'transitive_deps')
+# TODO(jtattermusch): deduplicate with transitive_dependencies.py (which has a slightly different logic)
+# TODO(jtattermusch): This is done to avoid introducing too many intermediate
+# libraries into the build.yaml-based builds (which might cause issues when
+# building language-specific artifacts) and also because the libraries
+# in build.yaml-based builds are generally considered units of distribution
+# (= public libraries that are visible to the user and are installable),
+# while in bazel builds it is customary to define a larger number of smaller
+# "sublibraries". The need for elision (and expansion)
+# of intermediate libraries can be re-evaluated in the future.
+def _populate_transitive_metadata(bazel_rules: Any,
+ public_dep_names: Iterable[str]) -> None:
+ """Add 'transitive_deps' field for each of the rules"""
+ # Create the map between Bazel label and public dependency name
+ bazel_label_to_dep_name = {}
+ for dep_name in public_dep_names:
+ bazel_label_to_dep_name[_get_bazel_label(dep_name)] = dep_name
- bazel_dep = build_order[-1]
- to_expand.remove(bazel_dep)
+ # Make sure we reached all the Bazel rules
+ # TODO(lidiz) potentially we could only update a subset of rules
+ for rule_name in bazel_rules:
+ if '_PROCESSING_DONE' not in bazel_rules[rule_name]:
+ _compute_transitive_metadata(rule_name, bazel_rules,
+ bazel_label_to_dep_name)
- is_public = bazel_dep in bazel_label_to_dep_name
- external_dep_name_maybe = _external_dep_name_from_bazel_dependency(
- bazel_dep)
- if is_public:
- # this is not an intermediate dependency we so we add it
- # to the list of public dependencies to the list, in the right format
- deps.add(bazel_label_to_dep_name[bazel_dep])
+def update_test_metadata_with_transitive_metadata(
+ all_extra_metadata: BuildDict, bazel_rules: BuildDict) -> None:
+ """Patches test build metadata with transitive metadata."""
+ for lib_name, lib_dict in all_extra_metadata.items():
+ # Skip if it isn't a test
+ if lib_dict.get('build') != 'test' or lib_dict.get('_TYPE') != 'target':
+ continue
- # we do not want to expand any intermediate libraries that are already included
- # by the dependency we just added
- expansion_blacklist.update(
- bazel_rules[bazel_dep]['transitive_deps'])
+ bazel_rule = bazel_rules[_get_bazel_label(lib_name)]
- elif external_dep_name_maybe:
- deps.add(external_dep_name_maybe)
+ if '//external:benchmark' in bazel_rule['_TRANSITIVE_DEPS']:
+ lib_dict['benchmark'] = True
+ lib_dict['defaults'] = 'benchmark'
- elif bazel_dep.startswith(
- '//external:') or not bazel_dep.startswith('//'):
- # all the other external deps can be skipped
- pass
+ if '//external:gtest' in bazel_rule['_TRANSITIVE_DEPS']:
+ lib_dict['gtest'] = True
+ lib_dict['language'] = 'c++'
- elif bazel_dep in expansion_blacklist:
- # do not expand if a public dependency that depends on this has already been expanded
- pass
- else:
- if bazel_dep in bazel_rules:
- # this is an intermediate library, expand it
- public_headers.update(
- _extract_public_headers(bazel_rules[bazel_dep]))
- headers.update(
- _extract_nonpublic_headers(bazel_rules[bazel_dep]))
- src.update(_extract_sources(bazel_rules[bazel_dep]))
-
- new_deps = _extract_deps(bazel_rules[bazel_dep])
- to_expand.update(new_deps)
- else:
- raise Exception(bazel_dep + ' not in bazel_rules')
-
- # make the 'deps' field transitive, but only list non-intermediate deps and selected external deps
- bazel_transitive_deps = bazel_rules[_get_bazel_label(
- target_name)]['transitive_deps']
- for transitive_bazel_dep in bazel_transitive_deps:
- public_name = bazel_label_to_dep_name.get(transitive_bazel_dep, None)
- if public_name:
- deps.add(public_name)
- external_dep_name_maybe = _external_dep_name_from_bazel_dependency(
- transitive_bazel_dep)
- if external_dep_name_maybe:
- # expanding all absl libraries is technically correct but creates too much noise
- if not external_dep_name_maybe.startswith('absl'):
- deps.add(external_dep_name_maybe)
-
- target_dict['public_headers'] = list(sorted(public_headers))
- target_dict['headers'] = list(sorted(headers))
- target_dict['src'] = list(sorted(src))
- target_dict['deps'] = list(sorted(deps))
-
-
-def _generate_build_metadata(build_extra_metadata, bazel_rules):
+def _generate_build_metadata(build_extra_metadata: BuildDict,
+ bazel_rules: BuildDict) -> BuildDict:
"""Generate build metadata in build.yaml-like format bazel build metadata and build.yaml-specific "extra metadata"."""
lib_names = list(build_extra_metadata.keys())
result = {}
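To make the collapsing concrete, here is a small self-contained model of the PX/PY/PZ example from the comments above (a toy graph mirroring the exclude/collapse rules as described, not the real function):

    # Toy graph from the comment: PX -> [IA, IB, PY, IC, PZ], PY -> [IC]
    deps = {
        'PX': ['IA', 'IB', 'PY', 'IC', 'PZ'],
        'PY': ['IC'],
        'PZ': [], 'IA': [], 'IB': [], 'IC': [],
    }
    public = {'PX', 'PY', 'PZ'}

    def transitive(rule):
        result = set(deps[rule])
        for d in deps[rule]:
            result |= transitive(d)
        return result

    # Exclude whatever our public deps already pull in transitively.
    exclude = set()
    for d in deps['PX']:
        if d in public:
            exclude |= transitive(d)

    collapsed = sorted(d for d in deps['PX']
                       if d in public and d not in exclude)
    merged_from = sorted(d for d in transitive('PX')
                         if d not in exclude and d not in public)

    assert collapsed == ['PY', 'PZ']        # collapsed deps of PX
    assert merged_from == ['IA', 'IB']      # their sources get merged into PX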
@@ -344,20 +385,6 @@ def _generate_build_metadata(build_extra_metadata, bazel_rules):
for lib_name in lib_names:
lib_dict = _create_target_from_bazel_rule(lib_name, bazel_rules)
- # Figure out the final list of headers and sources for given target.
- # While this is mostly based on bazel build metadata, build.yaml does
- # not necessarily expose all the targets that are present in bazel build.
- # These "intermediate dependencies" might get flattened.
- # TODO(jtattermusch): This is done to avoid introducing too many intermediate
- # libraries into the build.yaml-based builds (which might in cause issues
- # building language-specific artifacts) and also because the libraries
- # in build.yaml-based build are generally considered units of distributions
- # (= public libraries that are visible to the user and are installable),
- # while in bazel builds it is customary to define larger number of smaller
- # "sublibraries". The need for elision (and expansion)
- # of intermediate libraries can be re-evaluated in the future.
- _expand_intermediate_deps(lib_dict, lib_names, bazel_rules)
-
# populate extra properties from the build.yaml-specific "extra metadata"
lib_dict.update(build_extra_metadata.get(lib_name, {}))
@@ -374,8 +401,8 @@ def _generate_build_metadata(build_extra_metadata, bazel_rules):
if to_name:
# store lib under the new name and also change its 'name' property
if to_name in result:
- raise Exception('Cannot rename target ' + lib_name + ', ' +
- to_name + ' already exists.')
+ raise Exception('Cannot rename target ' + str(lib_name) + ', ' +
+ str(to_name) + ' already exists.')
lib_dict = result.pop(lib_name)
lib_dict['name'] = to_name
result[to_name] = lib_dict
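The rename handling above is driven by the '_RENAME' entries of _BUILD_EXTRA_METADATA listed further down; a toy run of just this pop-and-reinsert step:

    result = {'third_party/address_sorting:address_sorting':
                  {'name': 'third_party/address_sorting:address_sorting'}}
    to_name = 'address_sorting'
    lib_dict = result.pop('third_party/address_sorting:address_sorting')
    lib_dict['name'] = to_name
    result[to_name] = lib_dict
    assert list(result) == ['address_sorting']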
@@ -387,15 +414,10 @@ def _generate_build_metadata(build_extra_metadata, bazel_rules):
for dep in lib_dict_to_update['deps']
])
- # make sure deps are listed in reverse topological order (e.g. "grpc gpr" and not "gpr grpc")
- for lib_dict in result.values():
- lib_dict['deps'] = list(
- reversed(_sort_by_build_order(lib_dict['deps'], result, 'deps')))
-
return result
-def _convert_to_build_yaml_like(lib_dict):
+def _convert_to_build_yaml_like(lib_dict: BuildMetadata) -> BuildYaml:
lib_names = [
lib_name for lib_name in list(lib_dict.keys())
if lib_dict[lib_name].get('_TYPE', 'library') == 'library'
@@ -438,7 +460,7 @@ def _convert_to_build_yaml_like(lib_dict):
return build_yaml_like
-def _extract_cc_tests(bazel_rules):
+def _extract_cc_tests(bazel_rules: BuildDict) -> List[str]:
"""Gets list of cc_test tests from bazel rules"""
result = []
for bazel_rule in bazel_rules.values():
@@ -450,7 +472,7 @@ def _extract_cc_tests(bazel_rules):
return list(sorted(result))
-def _exclude_unwanted_cc_tests(tests):
+def _exclude_unwanted_cc_tests(tests: List[str]) -> List[str]:
"""Filters out bazel tests that we don't want to run with other build systems or we cannot build them reasonably"""
# most qps tests are autogenerated, we are fine without them
@@ -516,7 +538,8 @@ def _exclude_unwanted_cc_tests(tests):
return tests
-def _generate_build_extra_metadata_for_tests(tests, bazel_rules):
+def _generate_build_extra_metadata_for_tests(
+ tests: List[str], bazel_rules: BuildDict) -> BuildDict:
"""For given tests, generate the "extra metadata" that we need for our "build.yaml"-like output. The extra metadata is generated from the bazel rule metadata by using a bunch of heuristics."""
test_metadata = {}
for test in tests:
@@ -565,19 +588,11 @@ def _generate_build_extra_metadata_for_tests(tests, bazel_rules):
platforms.append('windows')
test_dict['platforms'] = platforms
- if '//external:benchmark' in bazel_rule['transitive_deps']:
- test_dict['benchmark'] = True
- test_dict['defaults'] = 'benchmark'
-
cmdline_args = bazel_rule['args']
if cmdline_args:
test_dict['args'] = list(cmdline_args)
- uses_gtest = '//external:gtest' in bazel_rule['transitive_deps']
- if uses_gtest:
- test_dict['gtest'] = True
-
- if test.startswith('test/cpp') or uses_gtest:
+ if test.startswith('test/cpp'):
test_dict['language'] = 'c++'
elif test.startswith('test/core'):
@@ -613,7 +628,7 @@ def _generate_build_extra_metadata_for_tests(tests, bazel_rules):
return test_metadata
-def _detect_and_print_issues(build_yaml_like):
+def _detect_and_print_issues(build_yaml_like: BuildYaml) -> None:
"""Try detecting some unusual situations and warn about them."""
for tgt in build_yaml_like['targets']:
if tgt['build'] == 'test':
@@ -627,33 +642,26 @@ def _detect_and_print_issues(build_yaml_like):
# these are mostly extra properties that we weren't able to obtain from the bazel build
# _TYPE: whether this is library, target or test
# _RENAME: whether this target should be renamed to a different name (to match expectations of make and cmake builds)
-# NOTE: secure is 'check' by default, so setting secure = False below does matter
_BUILD_EXTRA_METADATA = {
'third_party/address_sorting:address_sorting': {
'language': 'c',
'build': 'all',
- 'secure': False,
'_RENAME': 'address_sorting'
},
'gpr': {
'language': 'c',
'build': 'all',
- 'secure': False
},
'grpc': {
'language': 'c',
'build': 'all',
'baselib': True,
- 'secure': True,
- 'deps_linkage': 'static',
- 'dll': True,
'generate_plugin_registry': True
},
'grpc++': {
'language': 'c++',
'build': 'all',
'baselib': True,
- 'dll': True
},
'grpc++_alts': {
'language': 'c++',
@@ -672,23 +680,16 @@ _BUILD_EXTRA_METADATA = {
'language': 'c++',
'build': 'all',
'baselib': True,
- 'secure': False,
- 'dll': True
},
# TODO(jtattermusch): do we need to set grpc_csharp_ext's LDFLAGS for wrapping memcpy in the same way as in build.yaml?
'grpc_csharp_ext': {
'language': 'c',
'build': 'all',
- 'deps_linkage': 'static',
- 'dll': 'only'
},
'grpc_unsecure': {
'language': 'c',
'build': 'all',
'baselib': True,
- 'secure': False,
- 'deps_linkage': 'static',
- 'dll': True,
'generate_plugin_registry': True
},
'grpcpp_channelz': {
@@ -702,55 +703,47 @@ _BUILD_EXTRA_METADATA = {
'src/compiler:grpc_plugin_support': {
'language': 'c++',
'build': 'protoc',
- 'secure': False,
'_RENAME': 'grpc_plugin_support'
},
'src/compiler:grpc_cpp_plugin': {
'language': 'c++',
'build': 'protoc',
- 'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_cpp_plugin'
},
'src/compiler:grpc_csharp_plugin': {
'language': 'c++',
'build': 'protoc',
- 'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_csharp_plugin'
},
'src/compiler:grpc_node_plugin': {
'language': 'c++',
'build': 'protoc',
- 'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_node_plugin'
},
'src/compiler:grpc_objective_c_plugin': {
'language': 'c++',
'build': 'protoc',
- 'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_objective_c_plugin'
},
'src/compiler:grpc_php_plugin': {
'language': 'c++',
'build': 'protoc',
- 'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_php_plugin'
},
'src/compiler:grpc_python_plugin': {
'language': 'c++',
'build': 'protoc',
- 'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_python_plugin'
},
'src/compiler:grpc_ruby_plugin': {
'language': 'c++',
'build': 'protoc',
- 'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_ruby_plugin'
},
@@ -766,7 +759,6 @@ _BUILD_EXTRA_METADATA = {
'test/core/util:grpc_test_util_unsecure': {
'language': 'c',
'build': 'private',
- 'secure': False,
'_RENAME': 'grpc_test_util_unsecure'
},
# TODO(jtattermusch): consider adding grpc++_test_util_unsecure - it doesn't seem to be used by bazel build (don't forget to set secure: False)
@@ -785,13 +777,11 @@ _BUILD_EXTRA_METADATA = {
'test/core/end2end:end2end_tests': {
'language': 'c',
'build': 'private',
- 'secure': True,
'_RENAME': 'end2end_tests'
},
'test/core/end2end:end2end_nosec_tests': {
'language': 'c',
'build': 'private',
- 'secure': False,
'_RENAME': 'end2end_nosec_tests'
},
@@ -991,6 +981,8 @@ _BAZEL_DEPS_QUERIES = [
'deps("//:all")',
'deps("//src/compiler/...")',
'deps("//src/proto/...")',
+ # The ^ is needed to differentiate proto_library from go_proto_library
+ 'deps(kind("^proto_library", @envoy_api//envoy/...))',
]
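Since bazel's kind() operator matches its pattern as an unanchored regular expression against the rule kind, the effect of the '^' can be reproduced with Python's re module:

    import re
    # Without the anchor, 'go_proto_library' matches as well:
    assert re.search('proto_library', 'go_proto_library')
    # With '^', only kinds starting with 'proto_library' match:
    assert not re.search('^proto_library', 'go_proto_library')
    assert re.search('^proto_library', 'proto_library')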
# Step 1: run a bunch of "bazel query --output xml" queries to collect
@@ -1010,14 +1002,6 @@ for query in _BAZEL_DEPS_QUERIES:
bazel_rules.update(
_extract_rules_from_bazel_xml(_bazel_query_xml_tree(query)))
-# Step 1a: Knowing the transitive closure of dependencies will make
-# the postprocessing simpler, so compute the info for all our rules.
-#
-# Example:
-# '//:grpc' : { ...,
-# 'transitive_deps': ['//:gpr_base', ...] }
-_populate_transitive_deps(bazel_rules)
-
# Step 2: Extract the known bazel cc_test tests. While most tests
# will be buildable with other build systems just fine, some of these tests
# would be too difficult to build and run with other build systems,
@@ -1072,7 +1056,25 @@ all_extra_metadata.update(_BUILD_EXTRA_METADATA)
all_extra_metadata.update(
_generate_build_extra_metadata_for_tests(tests, bazel_rules))
-# Step 4: Generate the final metadata for all the targets.
+# Step 4: Compute the build metadata that will be used in the final build.yaml.
+# The final build metadata includes transitive dependencies, and sources/headers
+# expanded without intermediate dependencies.
+# Example:
+# '//:grpc' : { ...,
+# '_TRANSITIVE_DEPS': ['//:gpr_base', ...],
+# '_COLLAPSED_DEPS': ['gpr', ...],
+# '_COLLAPSED_SRCS': [...],
+# '_COLLAPSED_PUBLIC_HEADERS': [...],
+# '_COLLAPSED_HEADERS': [...]
+# }
+_populate_transitive_metadata(bazel_rules, all_extra_metadata.keys())
+
+# Step 4a: Update the existing test metadata with the updated build metadata.
+# Some build metadata of test targets depends on the transitive
+# metadata that wasn't available earlier.
+update_test_metadata_with_transitive_metadata(all_extra_metadata, bazel_rules)
+
+# Step 5: Generate the final metadata for all the targets.
# This is done by combining the bazel build metadata and the "extra metadata"
# we obtained in the previous step.
# In this step, we also perform some interesting massaging of the target metadata
@@ -1102,7 +1104,7 @@ all_extra_metadata.update(
# ... }
all_targets_dict = _generate_build_metadata(all_extra_metadata, bazel_rules)
-# Step 5: convert the dictionary with all the targets to a dict that has
+# Step 6: convert the dictionary with all the targets to a dict that has
# the desired "build.yaml"-like layout.
# TODO(jtattermusch): We use the custom "build.yaml"-like layout because
# currently all other build systems use that format as their source of truth.
@@ -1119,7 +1121,7 @@ build_yaml_like = _convert_to_build_yaml_like(all_targets_dict)
# detect and report some suspicious situations we've seen before
_detect_and_print_issues(build_yaml_like)
-# Step 6: Store the build_autogenerated.yaml in a deterministic (=sorted)
+# Step 7: Store the build_autogenerated.yaml in a deterministic (=sorted)
# and cleaned-up form.
# A basic overview of the resulting "build.yaml"-like format is here:
# https://github.com/grpc/grpc/blob/master/templates/README.md