author     vam <vam@google.com>  2019-05-03 16:49:50 -0700
committer  vam <vam@google.com>  2019-05-03 16:49:50 -0700
commit     f4affeec6641ba0f6d64cb386233421b4775360b (patch)
tree       c5b5e0fc9c8c8dad7ad1e892c54251be322a53e7 /bazel
parent     4269ce08f4e6c6fb7a91a662e04469b26ceb0c30 (diff)
parent     1ef80f46bd9176b8411196b864fd174a2c007bf2 (diff)
download   grpc-grpc-f4affeec6641ba0f6d64cb386233421b4775360b.tar.gz
Merge remote-tracking branch 'upstream/master'
Diffstat (limited to 'bazel')
-rw-r--r--  bazel/generate_cc.bzl      | 202
-rw-r--r--  bazel/grpc_python_deps.bzl |  14
-rw-r--r--  bazel/protobuf.bzl         |  84
-rw-r--r--  bazel/python_rules.bzl     | 203
4 files changed, 417 insertions(+), 86 deletions(-)
diff --git a/bazel/generate_cc.bzl b/bazel/generate_cc.bzl
index 8f30c84f6b..82f5cbad31 100644
--- a/bazel/generate_cc.bzl
+++ b/bazel/generate_cc.bzl
@@ -4,81 +4,132 @@ This is an internal rule used by cc_grpc_library, and shouldn't be used
directly.
"""
-def generate_cc_impl(ctx):
- """Implementation of the generate_cc rule."""
- protos = [f for src in ctx.attr.srcs for f in src.proto.direct_sources]
- includes = [f for src in ctx.attr.srcs for f in src.proto.transitive_imports]
- outs = []
- # label_len is length of the path from WORKSPACE root to the location of this build file
- label_len = 0
- # proto_root is the directory relative to which generated include paths should be
- proto_root = ""
- if ctx.label.package:
- # The +1 is for the trailing slash.
- label_len += len(ctx.label.package) + 1
- if ctx.label.workspace_root:
- label_len += len(ctx.label.workspace_root) + 1
- proto_root = "/" + ctx.label.workspace_root
+load(
+ "//bazel:protobuf.bzl",
+ "get_include_protoc_args",
+ "get_plugin_args",
+ "get_proto_root",
+ "proto_path_to_generated_filename",
+)
+
+_GRPC_PROTO_HEADER_FMT = "{}.grpc.pb.h"
+_GRPC_PROTO_SRC_FMT = "{}.grpc.pb.cc"
+_GRPC_PROTO_MOCK_HEADER_FMT = "{}_mock.grpc.pb.h"
+_PROTO_HEADER_FMT = "{}.pb.h"
+_PROTO_SRC_FMT = "{}.pb.cc"
- if ctx.executable.plugin:
- outs += [proto.path[label_len:-len(".proto")] + ".grpc.pb.h" for proto in protos]
- outs += [proto.path[label_len:-len(".proto")] + ".grpc.pb.cc" for proto in protos]
- if ctx.attr.generate_mocks:
- outs += [proto.path[label_len:-len(".proto")] + "_mock.grpc.pb.h" for proto in protos]
- else:
- outs += [proto.path[label_len:-len(".proto")] + ".pb.h" for proto in protos]
- outs += [proto.path[label_len:-len(".proto")] + ".pb.cc" for proto in protos]
- out_files = [ctx.actions.declare_file(out) for out in outs]
- dir_out = str(ctx.genfiles_dir.path + proto_root)
+def _strip_package_from_path(label_package, path):
+ if len(label_package) == 0:
+ return path
+ if not path.startswith(label_package + "/"):
+ fail("'{}' does not lie within '{}'.".format(path, label_package))
+ return path[len(label_package + "/"):]
- arguments = []
- if ctx.executable.plugin:
- arguments += ["--plugin=protoc-gen-PLUGIN=" + ctx.executable.plugin.path]
- flags = list(ctx.attr.flags)
- if ctx.attr.generate_mocks:
- flags.append("generate_mock_code=true")
- arguments += ["--PLUGIN_out=" + ",".join(flags) + ":" + dir_out]
- tools = [ctx.executable.plugin]
- else:
- arguments += ["--cpp_out=" + ",".join(ctx.attr.flags) + ":" + dir_out]
- tools = []
+def _join_directories(directories):
+ massaged_directories = [directory for directory in directories if len(directory) != 0]
+ return "/".join(massaged_directories)
- # Import protos relative to their workspace root so that protoc prints the
- # right include paths.
- for include in includes:
- directory = include.path
- if directory.startswith("external"):
- external_sep = directory.find("/")
- repository_sep = directory.find("/", external_sep + 1)
- arguments += ["--proto_path=" + directory[:repository_sep]]
+def generate_cc_impl(ctx):
+ """Implementation of the generate_cc rule."""
+ protos = [f for src in ctx.attr.srcs for f in src.proto.direct_sources]
+ includes = [
+ f
+ for src in ctx.attr.srcs
+ for f in src.proto.transitive_imports
+ ]
+ outs = []
+ proto_root = get_proto_root(
+ ctx.label.workspace_root,
+ )
+
+ label_package = _join_directories([ctx.label.workspace_root, ctx.label.package])
+ if ctx.executable.plugin:
+ outs += [
+ proto_path_to_generated_filename(
+ _strip_package_from_path(label_package, proto.path),
+ _GRPC_PROTO_HEADER_FMT,
+ )
+ for proto in protos
+ ]
+ outs += [
+ proto_path_to_generated_filename(
+ _strip_package_from_path(label_package, proto.path),
+ _GRPC_PROTO_SRC_FMT,
+ )
+ for proto in protos
+ ]
+ if ctx.attr.generate_mocks:
+ outs += [
+ proto_path_to_generated_filename(
+ _strip_package_from_path(label_package, proto.path),
+ _GRPC_PROTO_MOCK_HEADER_FMT,
+ )
+ for proto in protos
+ ]
else:
- arguments += ["--proto_path=."]
- # Include the output directory so that protoc puts the generated code in the
- # right directory.
- arguments += ["--proto_path={0}{1}".format(dir_out, proto_root)]
- arguments += [proto.path for proto in protos]
+ outs += [
+ proto_path_to_generated_filename(
+ _strip_package_from_path(label_package, proto.path),
+ _PROTO_HEADER_FMT,
+ )
+ for proto in protos
+ ]
+ outs += [
+ proto_path_to_generated_filename(
+ _strip_package_from_path(label_package, proto.path),
+ _PROTO_SRC_FMT,
+ )
+ for proto in protos
+ ]
+ out_files = [ctx.actions.declare_file(out) for out in outs]
+ dir_out = str(ctx.genfiles_dir.path + proto_root)
- # create a list of well known proto files if the argument is non-None
- well_known_proto_files = []
- if ctx.attr.well_known_protos:
- f = ctx.attr.well_known_protos.files.to_list()[0].dirname
- if f != "external/com_google_protobuf/src/google/protobuf":
- print("Error: Only @com_google_protobuf//:well_known_protos is supported")
+ arguments = []
+ if ctx.executable.plugin:
+ arguments += get_plugin_args(
+ ctx.executable.plugin,
+ ctx.attr.flags,
+ dir_out,
+ ctx.attr.generate_mocks,
+ )
+ tools = [ctx.executable.plugin]
else:
- # f points to "external/com_google_protobuf/src/google/protobuf"
- # add -I argument to protoc so it knows where to look for the proto files.
- arguments += ["-I{0}".format(f + "/../..")]
- well_known_proto_files = [f for f in ctx.attr.well_known_protos.files]
+ arguments += ["--cpp_out=" + ",".join(ctx.attr.flags) + ":" + dir_out]
+ tools = []
+
+ arguments += get_include_protoc_args(includes)
- ctx.actions.run(
- inputs = protos + includes + well_known_proto_files,
- tools = tools,
- outputs = out_files,
- executable = ctx.executable._protoc,
- arguments = arguments,
- )
+ # Include the output directory so that protoc puts the generated code in the
+ # right directory.
+ arguments += ["--proto_path={0}{1}".format(dir_out, proto_root)]
+ arguments += [proto.path for proto in protos]
- return struct(files=depset(out_files))
+ # create a list of well known proto files if the argument is non-None
+ well_known_proto_files = []
+ if ctx.attr.well_known_protos:
+ f = ctx.attr.well_known_protos.files.to_list()[0].dirname
+ if f != "external/com_google_protobuf/src/google/protobuf":
+ print(
+ "Error: Only @com_google_protobuf//:well_known_protos is supported",
+ )
+ else:
+ # f points to "external/com_google_protobuf/src/google/protobuf"
+ # add -I argument to protoc so it knows where to look for the proto files.
+ arguments += ["-I{0}".format(f + "/../..")]
+ well_known_proto_files = [
+ f
+ for f in ctx.attr.well_known_protos.files
+ ]
+
+ ctx.actions.run(
+ inputs = protos + includes + well_known_proto_files,
+ tools = tools,
+ outputs = out_files,
+ executable = ctx.executable._protoc,
+ arguments = arguments,
+ )
+
+ return struct(files = depset(out_files))
_generate_cc = rule(
attrs = {
@@ -96,10 +147,8 @@ _generate_cc = rule(
mandatory = False,
allow_empty = True,
),
- "well_known_protos" : attr.label(
- mandatory = False,
- ),
- "generate_mocks" : attr.bool(
+ "well_known_protos": attr.label(mandatory = False),
+ "generate_mocks": attr.bool(
default = False,
mandatory = False,
),
@@ -115,7 +164,10 @@ _generate_cc = rule(
)
def generate_cc(well_known_protos, **kwargs):
- if well_known_protos:
- _generate_cc(well_known_protos="@com_google_protobuf//:well_known_protos", **kwargs)
- else:
- _generate_cc(**kwargs)
+ if well_known_protos:
+ _generate_cc(
+ well_known_protos = "@com_google_protobuf//:well_known_protos",
+ **kwargs
+ )
+ else:
+ _generate_cc(**kwargs)
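As context for review: generate_cc is internal (per the docstring above) and is normally reached through the cc_grpc_library macro in this same bazel/ directory. A minimal BUILD sketch follows; the target names are invented, and the exact macro arguments are an assumption that may vary between gRPC versions.

# BUILD (illustrative sketch, not part of this change)
load("//bazel:cc_grpc_library.bzl", "cc_grpc_library")

proto_library(
    name = "helloworld_proto",      # hypothetical proto target
    srcs = ["helloworld.proto"],
)

cc_grpc_library(
    name = "helloworld_cc_grpc",
    srcs = [":helloworld_proto"],
    deps = [],
    proto_only = False,
    well_known_protos = False,
    generate_mocks = True,          # exercises the *_mock.grpc.pb.h outputs added above
)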
diff --git a/bazel/grpc_python_deps.bzl b/bazel/grpc_python_deps.bzl
index ec3df19e03..91438f3927 100644
--- a/bazel/grpc_python_deps.bzl
+++ b/bazel/grpc_python_deps.bzl
@@ -1,16 +1,8 @@
load("//third_party/py:python_configure.bzl", "python_configure")
load("@io_bazel_rules_python//python:pip.bzl", "pip_repositories")
load("@grpc_python_dependencies//:requirements.bzl", "pip_install")
-load("@org_pubref_rules_protobuf//python:rules.bzl", "py_proto_repositories")
def grpc_python_deps():
- # TODO(https://github.com/grpc/grpc/issues/18256): Remove conditional.
- if hasattr(native, "http_archive"):
- python_configure(name = "local_config_python")
- pip_repositories()
- pip_install()
- py_proto_repositories()
- else:
- print("Building Python gRPC with bazel 23.0+ is disabled pending " +
- "resolution of https://github.com/grpc/grpc/issues/18256.")
-
+ python_configure(name = "local_config_python")
+ pip_repositories()
+ pip_install()
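For reviewers, a minimal WORKSPACE sketch of where the now-unconditional grpc_python_deps() is invoked. The grpc_deps() call, and the assumption that it sets up @grpc_python_dependencies and @io_bazel_rules_python, are inferred from the load statements above and are not part of this change.

# WORKSPACE (illustrative sketch)
load("//bazel:grpc_deps.bzl", "grpc_deps")
grpc_deps()  # assumed to define @grpc_python_dependencies, @io_bazel_rules_python, etc.

load("//bazel:grpc_python_deps.bzl", "grpc_python_deps")
grpc_python_deps()  # runs python_configure, pip_repositories, pip_install unconditionally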
diff --git a/bazel/protobuf.bzl b/bazel/protobuf.bzl
new file mode 100644
index 0000000000..bddd0d70c7
--- /dev/null
+++ b/bazel/protobuf.bzl
@@ -0,0 +1,84 @@
+"""Utility functions for generating protobuf code."""
+
+_PROTO_EXTENSION = ".proto"
+
+def get_proto_root(workspace_root):
+ """Gets the root protobuf directory.
+
+ Args:
+ workspace_root: context.label.workspace_root
+
+ Returns:
+ The directory relative to which generated include paths should be.
+ """
+ if workspace_root:
+ return "/{}".format(workspace_root)
+ else:
+ return ""
+
+def _strip_proto_extension(proto_filename):
+ if not proto_filename.endswith(_PROTO_EXTENSION):
+ fail('"{}" does not end with "{}"'.format(
+ proto_filename,
+ _PROTO_EXTENSION,
+ ))
+ return proto_filename[:-len(_PROTO_EXTENSION)]
+
+def proto_path_to_generated_filename(proto_path, fmt_str):
+ """Calculates the name of a generated file for a protobuf path.
+
+ For example, "examples/protos/helloworld.proto" might map to
+ "helloworld.pb.h".
+
+ Args:
+ proto_path: The path to the .proto file.
+ fmt_str: A format string used to calculate the generated filename. For
+ example, "{}.pb.h" might be used to calculate a C++ header filename.
+
+ Returns:
+ The generated filename.
+ """
+ return fmt_str.format(_strip_proto_extension(proto_path))
+
+def _get_include_directory(include):
+ directory = include.path
+ if directory.startswith("external"):
+ external_separator = directory.find("/")
+ repository_separator = directory.find("/", external_separator + 1)
+ return directory[:repository_separator]
+ else:
+ return "."
+
+def get_include_protoc_args(includes):
+ """Returns protoc args that imports protos relative to their import root.
+
+ Args:
+ includes: A list of included proto files.
+
+ Returns:
+ A list of arguments to be passed to protoc. For example, ["--proto_path=."].
+ """
+ return [
+ "--proto_path={}".format(_get_include_directory(include))
+ for include in includes
+ ]
+
+def get_plugin_args(plugin, flags, dir_out, generate_mocks):
+ """Returns arguments configuring protoc to use a plugin for a language.
+
+ Args:
+ plugin: An executable file to run as the protoc plugin.
+ flags: The plugin flags to be passed to protoc.
+ dir_out: The output directory for the plugin.
+ generate_mocks: A bool indicating whether to generate mocks.
+
+ Returns:
+ A list of protoc arguments configuring the plugin.
+ """
+ augmented_flags = list(flags)
+ if generate_mocks:
+ augmented_flags.append("generate_mock_code=true")
+ return [
+ "--plugin=protoc-gen-PLUGIN=" + plugin.path,
+ "--PLUGIN_out=" + ",".join(augmented_flags) + ":" + dir_out,
+ ]
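The new helpers are pure string functions, so their behavior can be shown with a small hypothetical .bzl snippet; the expected values in the comments follow from the definitions above, and the file and function names here are invented for illustration.

# example_usage.bzl (hypothetical)
load(
    "//bazel:protobuf.bzl",
    "get_proto_root",
    "proto_path_to_generated_filename",
)

def example_generated_names():
    # Main workspace: an empty workspace_root yields an empty proto root.
    main_root = get_proto_root("")                   # ""

    # External workspace: the root is prefixed with "/".
    ext_root = get_proto_root("external/some_repo")  # "/external/some_repo"

    # ".proto" is stripped and the format string applied; the directory prefix
    # is preserved, which is why generate_cc.bzl strips the package first.
    header = proto_path_to_generated_filename(
        "examples/protos/helloworld.proto",
        "{}.grpc.pb.h",
    )  # "examples/protos/helloworld.grpc.pb.h"

    return [main_root, ext_root, header]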
diff --git a/bazel/python_rules.bzl b/bazel/python_rules.bzl
new file mode 100644
index 0000000000..bf6b4bec8d
--- /dev/null
+++ b/bazel/python_rules.bzl
@@ -0,0 +1,203 @@
+"""Generates and compiles Python gRPC stubs from proto_library rules."""
+
+load("@grpc_python_dependencies//:requirements.bzl", "requirement")
+load(
+ "//bazel:protobuf.bzl",
+ "get_include_protoc_args",
+ "get_plugin_args",
+ "get_proto_root",
+ "proto_path_to_generated_filename",
+)
+
+_GENERATED_PROTO_FORMAT = "{}_pb2.py"
+_GENERATED_GRPC_PROTO_FORMAT = "{}_pb2_grpc.py"
+
+def _get_staged_proto_file(context, source_file):
+ if source_file.dirname == context.label.package:
+ return source_file
+ else:
+ copied_proto = context.actions.declare_file(source_file.basename)
+ context.actions.run_shell(
+ inputs = [source_file],
+ outputs = [copied_proto],
+ command = "cp {} {}".format(source_file.path, copied_proto.path),
+ mnemonic = "CopySourceProto",
+ )
+ return copied_proto
+
+def _generate_py_impl(context):
+ protos = []
+ for src in context.attr.deps:
+ for file in src.proto.direct_sources:
+ protos.append(_get_staged_proto_file(context, file))
+ includes = [
+ file
+ for src in context.attr.deps
+ for file in src.proto.transitive_imports
+ ]
+ proto_root = get_proto_root(context.label.workspace_root)
+ format_str = (_GENERATED_GRPC_PROTO_FORMAT if context.executable.plugin else _GENERATED_PROTO_FORMAT)
+ out_files = [
+ context.actions.declare_file(
+ proto_path_to_generated_filename(
+ proto.basename,
+ format_str,
+ ),
+ )
+ for proto in protos
+ ]
+
+ arguments = []
+ tools = [context.executable._protoc]
+ if context.executable.plugin:
+ arguments += get_plugin_args(
+ context.executable.plugin,
+ context.attr.flags,
+ context.genfiles_dir.path,
+ False,
+ )
+ tools += [context.executable.plugin]
+ else:
+ arguments += [
+ "--python_out={}:{}".format(
+ ",".join(context.attr.flags),
+ context.genfiles_dir.path,
+ ),
+ ]
+
+ arguments += get_include_protoc_args(includes)
+ arguments += [
+ "--proto_path={}".format(context.genfiles_dir.path)
+ for proto in protos
+ ]
+ for proto in protos:
+ massaged_path = proto.path
+ if massaged_path.startswith(context.genfiles_dir.path):
+ massaged_path = proto.path[len(context.genfiles_dir.path) + 1:]
+ arguments.append(massaged_path)
+
+ well_known_proto_files = []
+ if context.attr.well_known_protos:
+ well_known_proto_directory = context.attr.well_known_protos.files.to_list(
+ )[0].dirname
+
+ arguments += ["-I{}".format(well_known_proto_directory + "/../..")]
+ well_known_proto_files = context.attr.well_known_protos.files.to_list()
+
+ context.actions.run(
+ inputs = protos + includes + well_known_proto_files,
+ tools = tools,
+ outputs = out_files,
+ executable = context.executable._protoc,
+ arguments = arguments,
+ mnemonic = "ProtocInvocation",
+ )
+ return struct(files = depset(out_files))
+
+__generate_py = rule(
+ attrs = {
+ "deps": attr.label_list(
+ mandatory = True,
+ allow_empty = False,
+ providers = ["proto"],
+ ),
+ "plugin": attr.label(
+ executable = True,
+ providers = ["files_to_run"],
+ cfg = "host",
+ ),
+ "flags": attr.string_list(
+ mandatory = False,
+ allow_empty = True,
+ ),
+ "well_known_protos": attr.label(mandatory = False),
+ "_protoc": attr.label(
+ default = Label("//external:protocol_compiler"),
+ executable = True,
+ cfg = "host",
+ ),
+ },
+ output_to_genfiles = True,
+ implementation = _generate_py_impl,
+)
+
+def _generate_py(well_known_protos, **kwargs):
+ if well_known_protos:
+ __generate_py(
+ well_known_protos = "@com_google_protobuf//:well_known_protos",
+ **kwargs
+ )
+ else:
+ __generate_py(**kwargs)
+
+_WELL_KNOWN_PROTO_LIBS = [
+ "@com_google_protobuf//:any_proto",
+ "@com_google_protobuf//:api_proto",
+ "@com_google_protobuf//:compiler_plugin_proto",
+ "@com_google_protobuf//:descriptor_proto",
+ "@com_google_protobuf//:duration_proto",
+ "@com_google_protobuf//:empty_proto",
+ "@com_google_protobuf//:field_mask_proto",
+ "@com_google_protobuf//:source_context_proto",
+ "@com_google_protobuf//:struct_proto",
+ "@com_google_protobuf//:timestamp_proto",
+ "@com_google_protobuf//:type_proto",
+ "@com_google_protobuf//:wrappers_proto",
+]
+
+def py_proto_library(
+ name,
+ deps,
+ well_known_protos = True,
+ proto_only = False,
+ **kwargs):
+ """Generate python code for a protobuf.
+
+ Args:
+ name: The name of the target.
+ deps: A list of dependencies. Must contain a single element.
+ well_known_protos: A bool indicating whether or not to include well-known
+ protos.
+ proto_only: A bool indicating whether to generate vanilla protobuf code
+ or to also generate gRPC code.
+ """
+ if len(deps) > 1:
+ fail("The supported length of 'deps' is 1.")
+
+ codegen_target = "_{}_codegen".format(name)
+ codegen_grpc_target = "_{}_grpc_codegen".format(name)
+
+ well_known_proto_rules = _WELL_KNOWN_PROTO_LIBS if well_known_protos else []
+
+ _generate_py(
+ name = codegen_target,
+ deps = deps,
+ well_known_protos = well_known_protos,
+ **kwargs
+ )
+
+ if not proto_only:
+ _generate_py(
+ name = codegen_grpc_target,
+ deps = deps,
+ plugin = "//:grpc_python_plugin",
+ well_known_protos = well_known_protos,
+ **kwargs
+ )
+
+ native.py_library(
+ name = name,
+ srcs = [
+ ":{}".format(codegen_grpc_target),
+ ":{}".format(codegen_target),
+ ],
+ deps = [requirement("protobuf")],
+ **kwargs
+ )
+ else:
+ native.py_library(
+ name = name,
+ srcs = [":{}".format(codegen_target), ":{}".format(codegen_target)],
+ deps = [requirement("protobuf")],
+ **kwargs
+ )
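Finally, a hedged BUILD sketch of how py_proto_library is intended to be consumed; the proto target and file names are invented for illustration.

# BUILD (illustrative sketch, not part of this change)
load("//bazel:python_rules.bzl", "py_proto_library")

proto_library(
    name = "helloworld_proto",
    srcs = ["helloworld.proto"],
)

# Generates helloworld_pb2.py and helloworld_pb2_grpc.py and wraps them,
# together with requirement("protobuf"), in a py_library named "helloworld_py".
py_proto_library(
    name = "helloworld_py",
    deps = [":helloworld_proto"],  # 'deps' must contain exactly one element
)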