Diffstat (limited to 'pw_log')
-rw-r--r--  pw_log/Android.bp                       154
-rw-r--r--  pw_log/BUILD.bazel                       25
-rw-r--r--  pw_log/BUILD.gn                           8
-rw-r--r--  pw_log/CMakeLists.txt                     1
-rw-r--r--  pw_log/basic_log_test.cc                  4
-rw-r--r--  pw_log/docs.rst                         310
-rw-r--r--  pw_log/glog_adapter_test.cc               2
-rw-r--r--  pw_log/public/pw_log/options.h            2
-rw-r--r--  pw_log/public/pw_log/tokenized_args.h    52
-rw-r--r--  pw_log/py/BUILD.bazel                    44
-rw-r--r--  pw_log/py/BUILD.gn                       52
-rw-r--r--  pw_log/py/log_decoder_test.py           869
-rw-r--r--  pw_log/py/pw_log/__init__.py             13
-rw-r--r--  pw_log/py/pw_log/log_decoder.py         413
-rw-r--r--  pw_log/py/pyproject.toml                 16
-rw-r--r--  pw_log/py/setup.cfg                      25
-rw-r--r--  pw_log/tokenized_args.rst                76
17 files changed, 1932 insertions, 134 deletions
diff --git a/pw_log/Android.bp b/pw_log/Android.bp
index 17138993c..9b38ba320 100644
--- a/pw_log/Android.bp
+++ b/pw_log/Android.bp
@@ -24,6 +24,160 @@ cc_library_headers {
host_supported: true,
}
+cc_library_headers {
+ name: "pw_log_pwpb_headers",
+ cpp_std: "c++20",
+ vendor_available: true,
+ host_supported: true,
+ generated_headers: [
+ "google_protobuf_descriptor_pwpb_h",
+ "pw_log_log_proto_pwpb_h",
+ "pw_log_log_rpc_pwpb_h",
+ "pw_log_log_raw_rpc_h",
+ "pw_protobuf_protos_common_pwpb_h",
+ "pw_tokenizer_proto_options_pwpb_h",
+ ],
+ export_generated_headers: [
+ "google_protobuf_descriptor_pwpb_h",
+ "pw_log_log_proto_pwpb_h",
+ "pw_log_log_rpc_pwpb_h",
+ "pw_log_log_raw_rpc_h",
+ "pw_protobuf_protos_common_pwpb_h",
+ "pw_tokenizer_proto_options_pwpb_h",
+ ],
+}
+
+// Copies the proto files to a prefix directory to add the prefix to the
+// compiled proto. The prefix is taken from the directory name of the first
+// item listed in out.
+genrule_defaults {
+ name: "pw_log_add_prefix_to_proto",
+ cmd: "out_files=($(out)); prefix=$$(dirname $${out_files[0]}); " +
+ "mkdir -p $${prefix}; cp -t $${prefix} $(in);"
+}
+
+genrule {
+ name: "pw_log_log_proto_with_prefix",
+ defaults: ["pw_log_add_prefix_to_proto"],
+ srcs: [
+ "log.proto",
+ ],
+ out: [
+ "pw_log/proto/log.proto",
+ ],
+}
+
+genrule {
+ name: "pw_log_log_proto_pwpb_h",
+ srcs: [
+ ":libprotobuf-internal-protos",
+ ":pw_log_log_proto_with_prefix",
+ ":pw_protobuf_common_proto",
+ ],
+ cmd: "python3 $(location pw_protobuf_compiler_py) " +
+ "--proto-path=external/pigweed/pw_protobuf/ " +
+ // Requires the generated pw_tokenizer/proto/options.proto filepath.
+ "--proto-path=$$(dirname $$(dirname $$(dirname $(location :pw_tokenizer_proto_options_proto)))) " +
+ "--proto-path=$$(dirname $$(dirname $$(dirname $(location :pw_log_log_proto_with_prefix)))) " +
+ "--proto-path=external/protobuf/src/ " +
+ "--out-dir=$(genDir) " +
+ "--plugin-path=$(location pw_protobuf_plugin_py) " +
+ "--compile-dir=$(genDir) " +
+ "--sources $(location :pw_log_log_proto_with_prefix) " +
+ "--language pwpb " +
+ "--no-experimental-proto3-optional " +
+ "--protoc=$(location aprotoc) && " +
+ // TODO(b/308678575) - Ideally the previous command would create the file at the right location,
+ // but following all the layers of pigweed wrappers around protoc is too difficult.
+ "python3 -c \"import os; import shutil; " +
+ "shutil.copy2(os.path.splitext('$(location :pw_log_log_proto_with_prefix)')[0]+'.pwpb.h', '$(out)')\"",
+ out: [
+ "pw_log/proto/log.pwpb.h",
+ ],
+ tool_files: [
+ ":pw_tokenizer_proto_options_proto",
+ ],
+ tools: [
+ "aprotoc",
+ "pw_protobuf_plugin_py",
+ "pw_protobuf_compiler_py",
+ ],
+}
+
+genrule {
+ name: "pw_log_log_rpc_pwpb_h",
+ srcs: [
+ ":libprotobuf-internal-protos",
+ ":pw_log_log_proto_with_prefix",
+ ":pw_protobuf_common_proto",
+ ],
+ cmd: "python3 $(location pw_protobuf_compiler_py) " +
+ "--proto-path=external/pigweed/pw_protobuf/ " +
+ // Requires the generated pw_tokenizer/proto/options.proto filepath.
+ "--proto-path=$$(dirname $$(dirname $$(dirname $(location :pw_tokenizer_proto_options_proto)))) " +
+ "--proto-path=$$(dirname $$(dirname $$(dirname $(location :pw_log_log_proto_with_prefix)))) " +
+ "--proto-path=external/protobuf/src/ " +
+ "--out-dir=$(genDir) " +
+ "--plugin-path=$(location pw_rpc_plugin_pwpb_py) " +
+ "--compile-dir=$(genDir) " +
+ "--sources $(location :pw_log_log_proto_with_prefix) " +
+ "--language pwpb_rpc " +
+ "--no-experimental-proto3-optional " +
+ "--protoc=$(location aprotoc) && " +
+ // TODO(b/308678575) - Ideally the previous command would create the file at the right location,
+ // but following all the layers of pigweed wrappers around protoc is too difficult.
+ "python3 -c \"import os; import shutil; " +
+ "shutil.copy2(os.path.splitext('$(location :pw_log_log_proto_with_prefix)')[0]+'.rpc.pwpb.h', '$(out)')\"",
+ out: [
+ "pw_log/proto/log.rpc.pwpb.h",
+ ],
+ tool_files: [
+ ":pw_tokenizer_proto_options_proto",
+ ],
+ tools: [
+ "aprotoc",
+ "pw_protobuf_compiler_py",
+ "pw_rpc_plugin_pwpb_py",
+ ],
+}
+
+genrule {
+ name: "pw_log_log_raw_rpc_h",
+ srcs: [
+ ":libprotobuf-internal-protos",
+ ":pw_log_log_proto_with_prefix",
+ ":pw_protobuf_common_proto",
+ ],
+ cmd: "python3 $(location pw_protobuf_compiler_py) " +
+ "--proto-path=external/pigweed/pw_protobuf/ " +
+ // Requires the generated pw_tokenizer/proto/options.proto filepath.
+ "--proto-path=$$(dirname $$(dirname $$(dirname $(location :pw_tokenizer_proto_options_proto)))) " +
+ "--proto-path=$$(dirname $$(dirname $$(dirname $(location :pw_log_log_proto_with_prefix)))) " +
+ "--proto-path=external/protobuf/src/ " +
+ "--out-dir=$(genDir) " +
+ "--plugin-path=$(location pw_rpc_plugin_rawpb_py) " +
+ "--compile-dir=$(genDir) " +
+ "--sources $(location :pw_log_log_proto_with_prefix) " +
+ "--language raw_rpc " +
+ "--no-experimental-proto3-optional " +
+ "--protoc=$(location aprotoc) && " +
+ // TODO(b/308678575) - Ideally the previous command would create the file at the right location,
+ // but following all the layers of pigweed wrappers around protoc is too difficult.
+ "python3 -c \"import os; import shutil; " +
+ "shutil.copy2(os.path.splitext('$(location :pw_log_log_proto_with_prefix)')[0]+'.raw_rpc.pb.h', '$(out)')\"",
+ out: [
+ "pw_log/proto/log.raw_rpc.pb.h",
+ ],
+ tool_files: [
+ ":pw_tokenizer_proto_options_proto",
+ ],
+ tools: [
+ "aprotoc",
+ "pw_protobuf_compiler_py",
+ "pw_rpc_plugin_rawpb_py",
+ ],
+}
+
android_library {
name: "pw_log_android_java",
srcs: ["java/android_main/dev/pigweed/pw_log/*.java"],
diff --git a/pw_log/BUILD.bazel b/pw_log/BUILD.bazel
index b1bf3acbf..6a4776f5c 100644
--- a/pw_log/BUILD.bazel
+++ b/pw_log/BUILD.bazel
@@ -18,8 +18,8 @@ load(
"pw_cc_library",
"pw_cc_test",
)
-load("//pw_protobuf_compiler:proto.bzl", "pw_proto_library")
load("//pw_build/bazel_internal:py_proto_library.bzl", "py_proto_library")
+load("//pw_protobuf_compiler:pw_proto_library.bzl", "pw_proto_library")
package(default_visibility = ["//visibility:public"])
@@ -43,12 +43,27 @@ pw_cc_facade(
pw_cc_library(
name = "pw_log",
+ hdrs = [
+ "public/pw_log/config.h",
+ "public/pw_log/levels.h",
+ "public/pw_log/log.h",
+ "public/pw_log/options.h",
+ "public/pw_log/short.h",
+ "public/pw_log/shorter.h",
+ "public/pw_log/tokenized_args.h",
+ ],
+ includes = ["public"],
deps = [
- ":facade",
- "@pigweed_config//:pw_log_backend",
+ "//pw_preprocessor",
+ "@pigweed//targets:pw_log_backend",
],
)
+label_flag(
+ name = "backend_impl",
+ build_setting_default = "//pw_log_basic:impl",
+)
+
pw_cc_library(
name = "glog_adapter",
hdrs = [
@@ -96,7 +111,7 @@ proto_library(
],
)
-# TODO(b/241456982): Not expected to build yet.
+# TODO: b/241456982 - Not expected to build yet.
py_proto_library(
name = "log_proto_py_pb2",
tags = ["manual"],
@@ -115,7 +130,7 @@ pw_proto_library(
pw_cc_library(
name = "backend_multiplexer",
- visibility = ["@pigweed_config//:__pkg__"],
+ visibility = ["@pigweed//targets:__pkg__"],
deps = ["//pw_log_basic"],
)
diff --git a/pw_log/BUILD.gn b/pw_log/BUILD.gn
index 3ff928ed2..e61a294df 100644
--- a/pw_log/BUILD.gn
+++ b/pw_log/BUILD.gn
@@ -59,6 +59,12 @@ pw_facade("pw_log") {
require_link_deps = [ ":impl" ]
}
+pw_source_set("args") {
+ public_configs = [ ":public_include_path" ]
+ public_deps = [ dir_pw_tokenizer ]
+ public = [ "public/pw_log/tokenized_args.h" ]
+}
+
pw_source_set("glog_adapter") {
public_configs = [ ":public_include_path" ]
public = [
@@ -162,12 +168,14 @@ pw_proto_library("protos") {
"$dir_pw_protobuf:common_protos",
"$dir_pw_tokenizer:proto",
]
+ python_package = "py"
}
pw_doc_group("docs") {
sources = [
"docs.rst",
"protobuf.rst",
+ "tokenized_args.rst",
]
inputs = [
"example_layer_diagram.svg",
diff --git a/pw_log/CMakeLists.txt b/pw_log/CMakeLists.txt
index 887bbf8dd..0bbd88414 100644
--- a/pw_log/CMakeLists.txt
+++ b/pw_log/CMakeLists.txt
@@ -38,6 +38,7 @@ pw_add_facade(pw_log INTERFACE
public/pw_log/options.h
public/pw_log/short.h
public/pw_log/shorter.h
+ public/pw_log/tokenized_args.h
PUBLIC_INCLUDES
public
PUBLIC_DEPS
diff --git a/pw_log/basic_log_test.cc b/pw_log/basic_log_test.cc
index 960f9beef..c851d785f 100644
--- a/pw_log/basic_log_test.cc
+++ b/pw_log/basic_log_test.cc
@@ -16,7 +16,7 @@
// compile the constructs promised by the logging facade; and that when run,
// there is no crash.
//
-// TODO(b/235289499): Add verification of the actually logged statements.
+// TODO: b/235289499 - Add verification of the actually logged statements.
// clang-format off
#define PW_LOG_MODULE_NAME "TST"
@@ -28,7 +28,7 @@
#include "gtest/gtest.h"
-// TODO(b/235291136): Test unsigned integer logging (32 and 64 bit); test
+// TODO: b/235291136 - Test unsigned integer logging (32 and 64 bit); test
// pointer logging.
void LoggingFromFunction() { PW_LOG_INFO("From a function!"); }
diff --git a/pw_log/docs.rst b/pw_log/docs.rst
index 3453c08e1..950a82c3e 100644
--- a/pw_log/docs.rst
+++ b/pw_log/docs.rst
@@ -1,3 +1,5 @@
+:tocdepth: 4
+
.. _module-pw_log:
======
@@ -15,9 +17,10 @@ service for efficiently storing and transmitting log messages. See
:ref:`module-pw_log-protobuf` for details.
.. toctree::
- :hidden:
+ :hidden:
- protobuf
+ protobuf
+ tokenized_args
--------------
Usage examples
@@ -27,39 +30,48 @@ long-form names.
.. code-block:: cpp
- #define PW_LOG_MODULE_NAME "BLE"
+ #define PW_LOG_MODULE_NAME "BLE"
- #include "pw_log/log.h"
+ #include "pw_log/log.h"
- int main() {
- PW_LOG_INFO("Booting...");
- PW_LOG_DEBUG("CPU temp: %.2f", cpu_temperature);
- if (BootFailed()) {
- PW_LOG_CRITICAL("Had trouble booting due to error %d", GetErrorCode());
- ReportErrorsAndHalt();
- }
- PW_LOG_INFO("Successfully booted");
- }
+ int main() {
+ PW_LOG_INFO("Booting...");
+ PW_LOG_DEBUG("CPU temp: %.2f", cpu_temperature);
+ if (BootFailed()) {
+ PW_LOG_CRITICAL("Had trouble booting due to error %d", GetErrorCode());
+ ReportErrorsAndHalt();
+ }
+ PW_LOG_INFO("Successfully booted");
+ }
In ``.cc`` files, it is possible to dispense with the ``PW_`` part of the log
names and go for shorter log macros. Include ``pw_log/short.h`` or
``pw_log/shorter.h`` for shorter versions of the macros.
+.. warning::
+ The shorter log macros collide with `Abseil's logging API
+ <https://abseil.io/docs/cpp/guides/logging>`_. Do not use them in upstream
+ Pigweed modules, or any code that may depend on Abseil.
+
.. code-block:: cpp
- #define PW_LOG_MODULE_NAME "BLE"
+ #define PW_LOG_MODULE_NAME "BLE"
+
+ #include "pw_log/shorter.h"
- #include "pw_log/shorter.h"
+ int main() {
+ INF("Booting...");
+ DBG("CPU temp: %.2f", cpu_temperature);
+ if (BootFailed()) {
+ CRT("Had trouble booting due to error %d", GetErrorCode());
+ ReportErrorsAndHalt();
+ }
+ INF("Successfully booted");
+ }
- int main() {
- INF("Booting...");
- DBG("CPU temp: %.2f", cpu_temperature);
- if (BootFailed()) {
- CRT("Had trouble booting due to error %d", GetErrorCode());
- ReportErrorsAndHalt();
- }
- INF("Successfully booted");
- }
+The ``pw_log`` facade also exposes a handful of macros that only apply
+specifically to tokenized logging. See :ref:`module-pw_log-tokenized-args` for
+details.
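+
+For example (the function and label names below are illustrative), a string
+argument can be wrapped with these macros so that it is tokenized when the
+backend supports nested tokenization and logged as a plain ``%s`` string
+otherwise:
+
+.. code-block:: cpp
+
+   #include "pw_log/log.h"
+   #include "pw_log/tokenized_args.h"
+
+   // "ACTIVE" and "IDLE" are hypothetical labels; a tokenizing backend logs
+   // them as tokens, while any other backend logs them as plain strings.
+   void LogComponentState(bool active) {
+     PW_LOG_TOKEN_TYPE state =
+         active ? PW_LOG_TOKEN_EXPR("ACTIVE") : PW_LOG_TOKEN_EXPR("IDLE");
+     PW_LOG_INFO("Component state: " PW_LOG_TOKEN_FMT(), state);
+   }
+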
Layer diagram example: ``stm32f429i-disc1``
===========================================
@@ -71,6 +83,8 @@ turn outputs to the STM32F429 bare metal backend for ``pw_sys_io``, which is
.. image:: example_layer_diagram.svg
+.. _module-pw_log-macros:
+
Logging macros
==============
These are the primary macros for logging information about the functioning of a
@@ -78,45 +92,45 @@ system, intended to be used directly.
.. c:macro:: PW_LOG(level, module, flags, fmt, ...)
- This is the primary mechanism for logging.
+ This is the primary mechanism for logging.
- *level* - An integer level as defined by ``pw_log/levels.h``.
+ *level* - An integer level as defined by ``pw_log/levels.h``.
- *module* - A string literal for the module name. Defaults to
- :c:macro:`PW_LOG_MODULE_NAME`.
+ *module* - A string literal for the module name. Defaults to
+ :c:macro:`PW_LOG_MODULE_NAME`.
- *flags* - Arbitrary flags the backend can leverage. The semantics of these
- flags are not defined in the facade, but are instead meant as a general
- mechanism for communication bits of information to the logging backend.
- ``pw_log`` reserves 2 flag bits by default, but log backends may provide for
- more or fewer flag bits.
+ *flags* - Arbitrary flags the backend can leverage. The semantics of these
+ flags are not defined in the facade, but are instead meant as a general
+ mechanism for communicating bits of information to the logging backend.
+ ``pw_log`` reserves 2 flag bits by default, but log backends may provide for
+ more or fewer flag bits.
- Here are some ideas for what a backend might use flags for:
+ Here are some ideas for what a backend might use flags for:
- - Example: ``HAS_PII`` - A log has personally-identifying data
- - Example: ``HAS_DII`` - A log has device-identifying data
- - Example: ``RELIABLE_DELIVERY`` - Ask the backend to ensure the log is
- delivered; this may entail blocking other logs.
- - Example: ``BEST_EFFORT`` - Don't deliver this log if it would mean blocking
- or dropping important-flagged logs
+ - Example: ``HAS_PII`` - A log has personally-identifying data
+ - Example: ``HAS_DII`` - A log has device-identifying data
+ - Example: ``RELIABLE_DELIVERY`` - Ask the backend to ensure the log is
+ delivered; this may entail blocking other logs.
+ - Example: ``BEST_EFFORT`` - Don't deliver this log if it would mean blocking
+ or dropping important-flagged logs
- *fmt* - The message to log, which may contain format specifiers like ``%d``
- or ``%0.2f``.
+ *fmt* - The message to log, which may contain format specifiers like ``%d``
+ or ``%0.2f``.
- Example:
+ Example:
- .. code-block:: cpp
+ .. code-block:: cpp
- PW_LOG(PW_LOG_LEVEL_INFO, PW_LOG_MODULE_NAME, PW_LOG_FLAGS, "Temp is %d degrees", temp);
- PW_LOG(PW_LOG_LEVEL_ERROR, PW_LOG_MODULE_NAME, UNRELIABLE_DELIVERY, "It didn't work!");
+ PW_LOG(PW_LOG_LEVEL_INFO, PW_LOG_MODULE_NAME, PW_LOG_FLAGS, "Temp is %d degrees", temp);
+ PW_LOG(PW_LOG_LEVEL_ERROR, PW_LOG_MODULE_NAME, UNRELIABLE_DELIVERY, "It didn't work!");
- .. note::
+ .. note::
- ``PW_LOG()`` should not be used frequently; typically only when adding
- flags to a particular message to mark PII or to indicate delivery
- guarantees. For most cases, prefer to use the direct ``PW_LOG_INFO`` or
- ``PW_LOG_DEBUG`` style macros, which are often implemented more efficiently
- in the backend.
+ ``PW_LOG()`` should not be used frequently; typically only when adding
+ flags to a particular message to mark PII or to indicate delivery
+ guarantees. For most cases, prefer to use the direct ``PW_LOG_INFO`` or
+ ``PW_LOG_DEBUG`` style macros, which are often implemented more efficiently
+ in the backend.
.. c:macro:: PW_LOG_DEBUG(fmt, ...)
@@ -125,7 +139,7 @@ system, intended to be used directly.
.. c:macro:: PW_LOG_ERROR(fmt, ...)
.. c:macro:: PW_LOG_CRITICAL(fmt, ...)
- Shorthand for ``PW_LOG(<level>, PW_LOG_MODULE_NAME, PW_LOG_FLAGS, fmt, ...)``.
+ Shorthand for ``PW_LOG(<level>, PW_LOG_MODULE_NAME, PW_LOG_FLAGS, fmt, ...)``.
--------------------
Module configuration
@@ -137,22 +151,22 @@ more details.
.. c:macro:: PW_LOG_LEVEL_DEFAULT
- Controls the default value of ``PW_LOG_LEVEL``. Setting
- ``PW_LOG_LEVEL_DEFAULT`` will change the behavior of all source files that
- have not explicitly set ``PW_LOG_LEVEL``. Defaults to ``PW_LOG_LEVEL_DEBUG``.
+ Controls the default value of ``PW_LOG_LEVEL``. Setting
+ ``PW_LOG_LEVEL_DEFAULT`` will change the behavior of all source files that
+ have not explicitly set ``PW_LOG_LEVEL``. Defaults to ``PW_LOG_LEVEL_DEBUG``.
.. c:macro:: PW_LOG_FLAGS_DEFAULT
- Controls the default value of ``PW_LOG_FLAGS``. Setting
- ``PW_LOG_FLAGS_DEFAULT`` will change the behavior of all source files that
- have not explicitly set ``PW_LOG_FLAGS``. Defaults to ``0``.
+ Controls the default value of ``PW_LOG_FLAGS``. Setting
+ ``PW_LOG_FLAGS_DEFAULT`` will change the behavior of all source files that
+ have not explicitly set ``PW_LOG_FLAGS``. Defaults to ``0``.
.. c:macro:: PW_LOG_ENABLE_IF_DEFAULT
- Controls the default value of ``PW_LOG_ENABLE_IF``. Setting
- ``PW_LOG_ENABLE_IF_DEFAULT`` will change the behavior of all source files that
- have not explicitly set ``PW_LOG_ENABLE_IF``. Defaults to
- ``((level) >= PW_LOG_LEVEL)``.
+ Controls the default value of ``PW_LOG_ENABLE_IF``. Setting
+ ``PW_LOG_ENABLE_IF_DEFAULT`` will change the behavior of all source files that
+ have not explicitly set ``PW_LOG_ENABLE_IF``. Defaults to
+ ``((level) >= PW_LOG_LEVEL)``.
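+
+For example, a project might supply these defaults as compile-time defines
+(the values below are arbitrary) so that individual source files only override
+them when they deviate from the project-wide behavior:
+
+.. code-block:: cpp
+
+   // Hypothetical project-wide configuration, e.g. provided through the
+   // build system or a module configuration header.
+   #define PW_LOG_LEVEL_DEFAULT PW_LOG_LEVEL_INFO
+   #define PW_LOG_FLAGS_DEFAULT 0
+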
Per-source file configuration
@@ -166,38 +180,38 @@ it is included. To handle potential transitive includes, place these
``#defines`` before all ``#include`` statements. This should only be done in
source files, not headers. For example:
- .. code-block:: cpp
+.. code-block:: cpp
- // Set the pw_log option macros here, before ALL of the #includes.
- #define PW_LOG_MODULE_NAME "Calibration"
- #define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
+ // Set the pw_log option macros here, before ALL of the #includes.
+ #define PW_LOG_MODULE_NAME "Calibration"
+ #define PW_LOG_LEVEL PW_LOG_LEVEL_WARN
- #include <array>
- #include <random>
+ #include <array>
+ #include <random>
- #include "devices/hal9000.h"
- #include "pw_log/log.h"
- #include "pw_rpc/server.h"
+ #include "devices/hal9000.h"
+ #include "pw_log/log.h"
+ #include "pw_rpc/server.h"
- int MyFunction() {
- PW_LOG_INFO("hello???");
- }
+ int MyFunction() {
+ PW_LOG_INFO("hello???");
+ }
.. c:macro:: PW_LOG_MODULE_NAME
- A string literal module name to use in logs. Log backends may attach this
- name to log messages or use it for runtime filtering. Defaults to ``""``. The
- ``PW_LOG_MODULE_NAME_DEFINED`` macro is set to ``1`` or ``0`` to indicate
- whether ``PW_LOG_MODULE_NAME`` was overridden.
+ A string literal module name to use in logs. Log backends may attach this
+ name to log messages or use it for runtime filtering. Defaults to ``""``. The
+ ``PW_LOG_MODULE_NAME_DEFINED`` macro is set to ``1`` or ``0`` to indicate
+ whether ``PW_LOG_MODULE_NAME`` was overridden.
.. c:macro:: PW_LOG_FLAGS
- Log flags to use for the ``PW_LOG_<level>`` macros. Different flags may be
- applied when using the ``PW_LOG`` macro directly.
+ Log flags to use for the ``PW_LOG_<level>`` macros. Different flags may be
+ applied when using the ``PW_LOG`` macro directly.
- Log backends use flags to change how they handle individual log messages.
- Potential uses include assigning logs priority or marking them as containing
- personal information. Defaults to ``PW_LOG_FLAGS_DEFAULT``.
+ Log backends use flags to change how they handle individual log messages.
+ Potential uses include assigning logs priority or marking them as containing
+ personal information. Defaults to ``PW_LOG_FLAGS_DEFAULT``.
.. c:macro:: PW_LOG_LEVEL
@@ -209,15 +223,15 @@ source files, not headers. For example:
.. code-block:: cpp
- #define PW_LOG_LEVEL PW_LOG_LEVEL_INFO
+ #define PW_LOG_LEVEL PW_LOG_LEVEL_INFO
- #include "pw_log/log.h"
+ #include "pw_log/log.h"
- void DoSomething() {
- PW_LOG_DEBUG("This won't be logged at all");
- PW_LOG_INFO("This is INFO level, and will display");
- PW_LOG_WARN("This is above INFO level, and will display");
- }
+ void DoSomething() {
+ PW_LOG_DEBUG("This won't be logged at all");
+ PW_LOG_INFO("This is INFO level, and will display");
+ PW_LOG_WARN("This is above INFO level, and will display");
+ }
.. c:macro:: PW_LOG_ENABLE_IF(level, flags)
@@ -230,37 +244,37 @@ source files, not headers. For example:
.. code-block:: cpp
- // Pigweed's log facade will call this macro to decide to log or not. In
- // this case, it will drop logs with the PII flag set if display of PII is
- // not enabled for the application.
- #define PW_LOG_ENABLE_IF(level, flags) \
- (level >= PW_LOG_LEVEL_INFO && \
- !((flags & MY_PRODUCT_PII_MASK) && MY_PRODUCT_LOG_PII_ENABLED)
+ // Pigweed's log facade will call this macro to decide to log or not. In
+ // this case, it will drop logs with the PII flag set if display of PII is
+ // not enabled for the application.
+ #define PW_LOG_ENABLE_IF(level, flags) \
+ (level >= PW_LOG_LEVEL_INFO && \
+ !((flags & MY_PRODUCT_PII_MASK) && MY_PRODUCT_LOG_PII_ENABLED)
- #include "pw_log/log.h"
+ #include "pw_log/log.h"
- // This define might be supplied by the build system.
- #define MY_PRODUCT_LOG_PII_ENABLED false
+ // This define might be supplied by the build system.
+ #define MY_PRODUCT_LOG_PII_ENABLED false
- // This is the PII mask bit selected by the application.
- #define MY_PRODUCT_PII_MASK (1 << 5)
+ // This is the PII mask bit selected by the application.
+ #define MY_PRODUCT_PII_MASK (1 << 5)
- void DoSomethingWithSensitiveInfo() {
- PW_LOG_DEBUG("This won't be logged at all");
- PW_LOG_INFO("This is INFO level, and will display");
+ void DoSomethingWithSensitiveInfo() {
+ PW_LOG_DEBUG("This won't be logged at all");
+ PW_LOG_INFO("This is INFO level, and will display");
- // In this example, this will not be logged since logging with PII
- // is disabled by the above macros.
- PW_LOG(PW_LOG_LEVEL_INFO,
- MY_PRODUCT_PII_MASK,
- "Sensitive: %d",
- sensitive_info);
- }
+ // In this example, this will not be logged since logging with PII
+ // is disabled by the above macros.
+ PW_LOG(PW_LOG_LEVEL_INFO,
+ MY_PRODUCT_PII_MASK,
+ "Sensitive: %d",
+ sensitive_info);
+ }
.. attention::
- At this time, only compile time filtering is supported. In the future, we
- plan to add support for runtime filtering.
+ At this time, only compile time filtering is supported. In the future, we
+ plan to add support for runtime filtering.
------------------
Logging attributes
@@ -297,6 +311,8 @@ common for the ``pw_log`` backend to cause circular dependencies. Because of
this, log backends may avoid declaring explicit dependencies, instead relying
on include paths to access header files.
+GN
+==
In GN, the ``pw_log`` backend's full implementation with true dependencies is
made available through the ``$dir_pw_log:impl`` group. When ``pw_log_BACKEND``
is set, ``$dir_pw_log:impl`` must be listed in the ``pw_build_LINK_DEPS``
@@ -313,6 +329,18 @@ to directly provide dependencies through include paths only, rather than GN
``public_deps``. In this case, GN header checking can be disabled with
``check_includes = false``.
+.. _module-pw_log-bazel-backend_impl:
+
+Bazel
+=====
+In Bazel, log backends may avoid cyclic dependencies by placing the full
+implementation in an ``impl`` target, like ``//pw_log_tokenized:impl``. The
+``//pw_log:backend_impl`` label flag should be set to the ``impl`` target
+required by the log backend used by the platform.
+
+You must add a dependency on the ``@pigweed//pw_log:backend_impl`` target to
+any binary using ``pw_log``.
+
----------------------
Google Logging Adapter
----------------------
@@ -327,15 +355,15 @@ Configuration
.. c:macro:: PW_LOG_CFG_GLOG_BUFFER_SIZE_BYTES
- The size of the stack-allocated buffer used by the Google Logging (glog)
- macros. This only affects the glog macros provided through pw_log/glog.h.
+ The size of the stack-allocated buffer used by the Google Logging (glog)
+ macros. This only affects the glog macros provided through pw_log/glog.h.
- Pigweed strongly recommends sticking to printf-style logging instead
- of C++ stream-style Google Log logging unless absolutely necessary. The glog
- macros are only provided for compatibility with non-embedded code. See
- :ref:`module-pw_log-design-discussion` for more details.
+ Pigweed strongly recommends sticking to printf-style logging instead
+ of C++ stream-style Google Log logging unless absolutely necessary. The glog
+ macros are only provided for compatibility with non-embedded code. See
+ :ref:`module-pw_log-design-discussion` for more details.
- Undersizing this buffer will result in truncated log messages.
+ Undersizing this buffer will result in truncated log messages.
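+
+   For example, a project could raise the limit by overriding the macro in its
+   module configuration (the value below is arbitrary):
+
+   .. code-block:: cpp
+
+      #define PW_LOG_CFG_GLOG_BUFFER_SIZE_BYTES 256
+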
-----------------
Design discussion
@@ -354,14 +382,14 @@ Consider this example use of Google Log:
.. code-block:: cpp
- LOG(INFO) << "My temperature is " << temperature << ". State: " << state;
+ LOG(INFO) << "My temperature is " << temperature << ". State: " << state;
This log statement has two string literals. It might seem like one could
simply move to tokenization:
.. code-block:: cpp
- LOG(INFO) << TOKEN("My temperature is ") << temperature << TOKEN(". State: ") << state;
+ LOG(INFO) << TOKEN("My temperature is ") << temperature << TOKEN(". State: ") << state;
However, this doesn't work. The key problem is that the tokenization system
needs to allocate the string in a linker section that is excluded from the
@@ -376,8 +404,8 @@ string constant, which can be expanded by the preprocessor (as part of
.. code-block:: cpp
- // Note: LOG_INFO can be tokenized behind the macro; transparent to users.
- PW_LOG_INFO("My temperature is %d. State: %s", temperature, state);
+ // Note: LOG_INFO can be tokenized behind the macro; transparent to users.
+ PW_LOG_INFO("My temperature is %d. State: %s", temperature, state);
Additionally, while Pigweed is mostly C++, it is a practical reality that at times
projects using Pigweed will need to log from third-party libraries written in
@@ -426,3 +454,35 @@ implementation in ``pw_log/java/main`` simply wraps a
``com.google.common.flogger.FluentLogger``. An implementation that logs to
Android's ``android.util.Log`` instead is provided in
``pw_log/java/android_main``.
+
+----------------
+pw_log in Python
+----------------
+``pw_log`` provides utilities to decode ``LogEntries`` and the encapsulated
+``LogEntry`` proto messages.
+
+The ``Log`` class represents a decoded ``LogEntry`` in a human-readable and
+consumable fashion.
+
+The ``LogStreamDecoder`` offers APIs to decode ``LogEntries`` and ``LogEntry``
+while tracking and logging log drops. It requires a ``decoded_log_handler`` to
+pass decoded logs to. This class can also be customized to use an optional token
+database if the message, module, and thread names are tokenized; a custom
+timestamp parser; and an optional message parser for any extra message parsing.
+``pw_log`` includes examples for customizing the ``LogStreamDecoder``:
+``timestamp_parser_ns_since_boot`` parses the timestamp number from nanoseconds
+since boot to an HH:MM:SS string, ``log_decoded_log`` emits a decoded ``Log``
+to the provided logger in a format known to ``pw console``, and
+``pw_status_code_to_name`` searches the decoded log message for a matching
+pattern encapsulating the status code number and replaces it with the status
+name.
+
+Python API
+==========
+
+pw_log.log_decoder
+------------------
+.. automodule:: pw_log.log_decoder
+ :members: Log, LogStreamDecoder
+ :undoc-members:
+ :show-inheritance:
diff --git a/pw_log/glog_adapter_test.cc b/pw_log/glog_adapter_test.cc
index d6691cea0..ce386d43f 100644
--- a/pw_log/glog_adapter_test.cc
+++ b/pw_log/glog_adapter_test.cc
@@ -11,7 +11,7 @@
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
-// TODO(b/235289499): Add verification of the actually logged statements.
+// TODO: b/235289499 - Add verification of the actually logged statements.
// clang-format off
#define PW_LOG_MODULE_NAME "TST"
diff --git a/pw_log/public/pw_log/options.h b/pw_log/public/pw_log/options.h
index 87936f6db..f43762aec 100644
--- a/pw_log/public/pw_log/options.h
+++ b/pw_log/public/pw_log/options.h
@@ -68,7 +68,7 @@
#endif // PW_LOG_FLAGS
// DEPRECATED: Use PW_LOG_FLAGS.
-// TODO(b/234876701): Remove this macro after migration.
+// TODO: b/234876701 - Remove this macro after migration.
#ifndef PW_LOG_DEFAULT_FLAGS
#define PW_LOG_DEFAULT_FLAGS PW_LOG_FLAGS
#endif // PW_LOG_DEFAULT_FLAGS
diff --git a/pw_log/public/pw_log/tokenized_args.h b/pw_log/public/pw_log/tokenized_args.h
new file mode 100644
index 000000000..06b0d5a0c
--- /dev/null
+++ b/pw_log/public/pw_log/tokenized_args.h
@@ -0,0 +1,52 @@
+// Copyright 2023 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+// pw_log backends that use pw_tokenizer and want to support nested
+// tokenization provide this file under their public_overrides/ directory to
+// activate the PW_LOG_TOKEN aliases. If the log backend does not provide this
+// file, arguments behave as plain strings (const char*).
+#if __has_include("pw_log_backend/log_backend_uses_pw_tokenizer.h")
+
+#include "pw_tokenizer/nested_tokenization.h"
+#include "pw_tokenizer/tokenize.h"
+
+#define PW_LOG_TOKEN_TYPE pw_tokenizer_Token
+#define PW_LOG_TOKEN PW_TOKENIZE_STRING
+#define PW_LOG_TOKEN_EXPR PW_TOKENIZE_STRING_EXPR
+#define PW_LOG_TOKEN_FMT PW_TOKEN_FMT
+
+#else
+
+/// If nested tokenization is supported by the logging backend, this is an
+/// alias for `pw_tokenizer_Token`.
+///
+/// For non-tokenizing backends, defaults to `const char*`.
+#define PW_LOG_TOKEN_TYPE const char*
+
+/// If nested tokenization is supported by the logging backend, this is an
+/// alias for `PW_TOKENIZE_STRING`. No-op otherwise.
+#define PW_LOG_TOKEN(string_literal) string_literal
+
+/// If nested tokenization is supported by the logging backend, this is an
+/// alias for `PW_TOKENIZE_STRING_EXPR`. No-op otherwise.
+#define PW_LOG_TOKEN_EXPR(string_literal) string_literal
+
+/// If nested tokenization is supported by the logging backend, this is an
+/// alias for `PW_TOKEN_FMT`.
+///
+/// For non-tokenizing backends, defaults to the string specifier `%s`.
+#define PW_LOG_TOKEN_FMT() "%s"
+
+#endif  // __has_include("pw_log_backend/log_backend_uses_pw_tokenizer.h")
diff --git a/pw_log/py/BUILD.bazel b/pw_log/py/BUILD.bazel
new file mode 100644
index 000000000..93ea9296b
--- /dev/null
+++ b/pw_log/py/BUILD.bazel
@@ -0,0 +1,44 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+package(default_visibility = ["//visibility:public"])
+
+py_library(
+ name = "pw_log",
+ srcs = [
+ "pw_log/__init__.py",
+ "pw_log/log_decoder.py",
+ ],
+ imports = ["."],
+ deps = [
+ # TODO: b/241456982 - Add this dep back in
+ # "//pw_log:log_proto_py_pb2",
+ "//pw_log_tokenized/py:pw_log_tokenized",
+ "//pw_rpc/py:pw_rpc",
+ "//pw_status/py:pw_status",
+ "//pw_tokenizer/py:pw_tokenizer",
+ ],
+)
+
+# TODO: b/241456982 - Not expected to build yet.
+py_test(
+ name = "log_decoder_test",
+ srcs = [
+ "log_decoder_test.py",
+ ],
+ tags = ["manual"],
+ deps = [
+ ":pw_log",
+ ],
+)
diff --git a/pw_log/py/BUILD.gn b/pw_log/py/BUILD.gn
new file mode 100644
index 000000000..44c685c5e
--- /dev/null
+++ b/pw_log/py/BUILD.gn
@@ -0,0 +1,52 @@
+# Copyright 2023 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+import("//build_overrides/pigweed.gni")
+
+import("$dir_pw_build/python.gni")
+import("$dir_pw_protobuf_compiler/proto.gni")
+
+pw_python_package("py") {
+ generate_setup = {
+ metadata = {
+ name = "pw_log"
+ version = "0.0.1"
+ }
+ }
+ sources = [
+ "pw_log/__init__.py",
+ "pw_log/log_decoder.py",
+ ]
+ tests = [ "log_decoder_test.py" ]
+ python_test_deps = []
+ pylintrc = "$dir_pigweed/.pylintrc"
+ mypy_ini = "$dir_pigweed/.mypy.ini"
+ python_deps = [
+ "$dir_pw_log_tokenized/py",
+ "$dir_pw_status/py",
+ "$dir_pw_tokenizer/py",
+ ]
+ proto_library = "..:protos"
+}
+
+# This setup.py may be used to install pw_log without GN. It does not
+# include the pw_log.proto subpackage, since it contains a generated
+# protobuf module.
+pw_python_script("setup") {
+ sources = [ "setup.py" ]
+ inputs = [
+ "setup.cfg",
+ "pyproject.toml",
+ ]
+}
diff --git a/pw_log/py/log_decoder_test.py b/pw_log/py/log_decoder_test.py
new file mode 100644
index 000000000..1629aee22
--- /dev/null
+++ b/pw_log/py/log_decoder_test.py
@@ -0,0 +1,869 @@
+# Copyright 2023 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+"""Log decoder tests."""
+
+from dataclasses import dataclass
+import logging
+from random import randint
+from typing import Any, List
+from unittest import TestCase, main
+
+from pw_log.log_decoder import (
+ Log,
+ LogStreamDecoder,
+ log_decoded_log,
+ pw_status_code_to_name,
+ timestamp_parser_ns_since_boot,
+)
+from pw_log.proto import log_pb2
+import pw_tokenizer
+
+_MESSAGE_NO_FILENAME = 'World'
+_MESSAGE_AND_ARGS_NO_FILENAME = f'■msg♦{_MESSAGE_NO_FILENAME}'
+_MESSAGE_TOKEN_NO_FILENAME = pw_tokenizer.tokens.pw_tokenizer_65599_hash(
+ _MESSAGE_AND_ARGS_NO_FILENAME
+)
+
+# Creating a database with tokenized information for the core to detokenize
+# tokenized log entries.
+_TOKEN_DATABASE = pw_tokenizer.tokens.Database(
+ [
+ pw_tokenizer.tokens.TokenizedStringEntry(0x01148A48, 'total_dropped'),
+ pw_tokenizer.tokens.TokenizedStringEntry(
+ 0x03796798, 'min_queue_remaining'
+ ),
+ pw_tokenizer.tokens.TokenizedStringEntry(0x2E668CD6, 'Jello, world!'),
+ pw_tokenizer.tokens.TokenizedStringEntry(0x329481A2, 'parser_errors'),
+ pw_tokenizer.tokens.TokenizedStringEntry(0x7F35A9A5, 'TestName'),
+ pw_tokenizer.tokens.TokenizedStringEntry(0xCC6D3131, 'Jello?'),
+ pw_tokenizer.tokens.TokenizedStringEntry(
+ 0x144C501D, '■msg♦SampleMessage■module♦MODULE■file♦file/path.cc'
+ ),
+ pw_tokenizer.tokens.TokenizedStringEntry(0x0000106A, 'ModuleOrMessage'),
+ pw_tokenizer.tokens.TokenizedStringEntry(
+ pw_tokenizer.tokens.pw_tokenizer_65599_hash(
+ '■msg♦World■module♦wifi■file♦/path/to/file.cc'
+ ),
+ '■msg♦World■module♦wifi■file♦/path/to/file.cc',
+ ),
+ pw_tokenizer.tokens.TokenizedStringEntry(
+ _MESSAGE_TOKEN_NO_FILENAME, _MESSAGE_AND_ARGS_NO_FILENAME
+ ),
+ ]
+)
+_DETOKENIZER = pw_tokenizer.Detokenizer(_TOKEN_DATABASE)
+
+
+def _create_log_entry_with_tokenized_fields(
+ message: str, module: str, file: str, thread: str, line: int, level: int
+) -> log_pb2.LogEntry:
+ """Tokenizing tokenizable LogEntry fields to become a detoknized log."""
+ tokenized_message = pw_tokenizer.encode.encode_token_and_args(
+ pw_tokenizer.tokens.pw_tokenizer_65599_hash(message)
+ )
+ tokenized_module = pw_tokenizer.encode.encode_token_and_args(
+ pw_tokenizer.tokens.pw_tokenizer_65599_hash(module)
+ )
+ tokenized_file = pw_tokenizer.encode.encode_token_and_args(
+ pw_tokenizer.tokens.pw_tokenizer_65599_hash(file)
+ )
+ tokenized_thread = pw_tokenizer.encode.encode_token_and_args(
+ pw_tokenizer.tokens.pw_tokenizer_65599_hash(thread)
+ )
+
+ return log_pb2.LogEntry(
+ message=tokenized_message,
+ module=tokenized_module,
+ file=tokenized_file,
+ line_level=Log.pack_line_level(line, level),
+ thread=tokenized_thread,
+ )
+
+
+def _create_random_log_entry() -> log_pb2.LogEntry:
+ return log_pb2.LogEntry(
+ message=bytes(f'message {randint(1,100)}'.encode('utf-8')),
+ line_level=Log.pack_line_level(
+ randint(0, 2000), randint(logging.DEBUG, logging.CRITICAL)
+ ),
+ file=b'main.cc',
+ thread=bytes(f'thread {randint(1,5)}'.encode('utf-8')),
+ )
+
+
+def _create_drop_count_message_log_entry(
+ drop_count: int, reason: str = ''
+) -> log_pb2.LogEntry:
+ log_entry = log_pb2.LogEntry(dropped=drop_count)
+ if reason:
+ log_entry.message = bytes(reason.encode('utf-8'))
+ return log_entry
+
+
+class TestLogStreamDecoderBase(TestCase):
+ """Base Test class for LogStreamDecoder."""
+
+ def setUp(self) -> None:
+ """Set up logs decoder."""
+
+ def parse_pw_status(msg: str) -> str:
+ return pw_status_code_to_name(msg)
+
+ self.captured_logs: List[Log] = []
+
+ def decoded_log_handler(log: Log) -> None:
+ self.captured_logs.append(log)
+
+ self.decoder = LogStreamDecoder(
+ decoded_log_handler=decoded_log_handler,
+ detokenizer=_DETOKENIZER,
+ source_name='source',
+ timestamp_parser=timestamp_parser_ns_since_boot,
+ message_parser=parse_pw_status,
+ )
+
+ def _captured_logs_as_str(self) -> str:
+ return '\n'.join(map(str, self.captured_logs))
+
+
+class TestLogStreamDecoderDecodingFunctionality(TestLogStreamDecoderBase):
+ """Tests LogStreamDecoder decoding functionality."""
+
+ def test_parse_log_entry_valid_non_tokenized(self) -> None:
+ """Test that valid LogEntry protos are parsed correctly."""
+ expected_log = Log(
+ message='Hello',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Hello',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(result, expected_log)
+
+ def test_parse_log_entry_valid_packed_message(self) -> None:
+ """Test that valid LogEntry protos are parsed correctly."""
+ log_with_metadata_in_message = Log(
+ message='World',
+ file_and_line='/path/to/file.cc',
+ level=logging.DEBUG,
+ source_name=self.decoder.source_name,
+ module_name='wifi',
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=bytes(
+ '■msg♦World■module♦wifi■file♦/path/to/file.cc'.encode(
+ 'utf-8'
+ )
+ ),
+ line_level=Log.pack_line_level(0, logging.DEBUG),
+ timestamp=100,
+ )
+ )
+ self.assertEqual(result, log_with_metadata_in_message)
+
+ def test_parse_log_entry_valid_logs_drop_message(self) -> None:
+ """Test that valid LogEntry protos are parsed correctly."""
+ dropped_message = Log(
+ message='Dropped 30 logs due to buffer too small',
+ level=logging.WARNING,
+ source_name=self.decoder.source_name,
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(message=b'buffer too small', dropped=30)
+ )
+ self.assertEqual(result, dropped_message)
+
+ def test_parse_log_entry_valid_tokenized(self) -> None:
+ """Test that tokenized LogEntry protos are parsed correctly."""
+ message = 'Jello, world!'
+ module_name = 'TestName'
+ file = 'parser_errors'
+ thread_name = 'Jello?'
+ line = 123
+ level = logging.INFO
+
+ expected_log = Log(
+ message=message,
+ module_name=module_name,
+ file_and_line=file + ':123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ thread_name=thread_name,
+ )
+
+ log_entry = _create_log_entry_with_tokenized_fields(
+ message, module_name, file, thread_name, line, level
+ )
+ result = self.decoder.parse_log_entry_proto(log_entry)
+ self.assertEqual(result, expected_log, msg='Log was not detokenized')
+
+ def test_tokenized_contents_not_detokenized(self):
+ """Test fields with tokens not in the database are not decrypted."""
+ # The following strings do not have tokens in the device token db.
+ message_not_in_db = 'device is shutting down.'
+ module_name_not_in_db = 'Battery'
+ file_not_in_db = 'charger.cc'
+ thread_name_not_in_db = 'BatteryStatus'
+ line = 123
+ level = logging.INFO
+
+ log_entry = _create_log_entry_with_tokenized_fields(
+ message_not_in_db,
+ module_name_not_in_db,
+ file_not_in_db,
+ thread_name_not_in_db,
+ line,
+ level,
+ )
+ message = pw_tokenizer.proto.decode_optionally_tokenized(
+ _DETOKENIZER, log_entry.message
+ )
+ module = pw_tokenizer.proto.decode_optionally_tokenized(
+ _DETOKENIZER, log_entry.module
+ )
+ file = pw_tokenizer.proto.decode_optionally_tokenized(
+ _DETOKENIZER, log_entry.file
+ )
+ thread = pw_tokenizer.proto.decode_optionally_tokenized(
+ _DETOKENIZER, log_entry.thread
+ )
+ expected_log = Log(
+ message=message,
+ module_name=module,
+ file_and_line=file + ':123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ thread_name=thread,
+ )
+ result = self.decoder.parse_log_entry_proto(log_entry)
+ self.assertEqual(
+ result, expected_log, msg='Log was unexpectedly detokenized'
+ )
+
+ def test_extracting_log_entry_fields_from_tokenized_metadata(self):
+ """Test that tokenized metadata can be used to extract other fields."""
+ metadata = '■msg♦World■module♦wifi■file♦/path/to/file.cc'
+ thread_name = 'M0Log'
+
+ log_entry = log_pb2.LogEntry(
+ message=pw_tokenizer.encode.encode_token_and_args(
+ pw_tokenizer.tokens.pw_tokenizer_65599_hash(metadata)
+ ),
+ line_level=Log.pack_line_level(0, logging.DEBUG),
+ thread=bytes(thread_name.encode('utf-8')),
+ )
+
+ log_with_metadata_in_message = Log(
+ message='World',
+ file_and_line='/path/to/file.cc',
+ level=logging.DEBUG,
+ source_name=self.decoder.source_name,
+ module_name='wifi',
+ timestamp='0:00',
+ thread_name=thread_name,
+ )
+
+ result = self.decoder.parse_log_entry_proto(log_entry)
+ self.assertEqual(
+ result, log_with_metadata_in_message, msg='Log was detokenized.'
+ )
+
+ def test_extracting_status_argument_from_log_message(self):
+ """Test extract status from log message."""
+ expected_log = Log(
+ message='Could not start flux capacitor: PERMISSION_DENIED',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Could not start flux capacitor: pw::Status=7',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ expected_log = Log(
+ message='Error connecting to server: UNAVAILABLE',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Error connecting to server: pw::Status=14',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_extracting_status_with_improper_spacing(self):
+ """Test spaces before pw::Status are ignored."""
+ expected_log = Log(
+ message='Error connecting to server:UNAVAILABLE',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Error connecting to server:pw::Status=14',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ expected_log = Log(
+ message='Error connecting to server: UNAVAILABLE',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Error connecting to server: pw::Status=14',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_not_extracting_status_extra_space_before_code(self):
+ """Test spaces after pw::Status are not allowed."""
+ expected_log = Log(
+ message='Could not start flux capacitor: pw::Status= 7',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Could not start flux capacitor: pw::Status= 7',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_not_extracting_status_new_line_before_code(self):
+ """Test new line characters after pw::Status are not allowed."""
+ expected_log = Log(
+ message='Could not start flux capacitor: pw::Status=\n7',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Could not start flux capacitor: pw::Status=\n7',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_not_extracting_status_from_log_message_with_improper_format(self):
+ """Test status not extracted from log message with incorrect format."""
+ expected_log = Log(
+ message='Could not start flux capacitor: pw::Status12',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Could not start flux capacitor: pw::Status12',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_status_code_in_message_does_not_exist(self):
+ """Test status does not exist in pw_status."""
+ expected_log = Log(
+ message='Could not start flux capacitor: pw::Status=17',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Could not start flux capacitor: pw::Status=17',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_status_code_in_message_is_negative(self):
+ """Test status code is negative."""
+ expected_log = Log(
+ message='Could not start flux capacitor: pw::Status=-1',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Could not start flux capacitor: pw::Status=-1',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_status_code_is_name(self):
+ """Test if the status code format includes the name instead."""
+ expected_log = Log(
+ message='Cannot use flux capacitor: pw::Status=PERMISSION_DENIED',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=(
+ b'Cannot use flux capacitor: pw::Status=PERMISSION_DENIED'
+ ),
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_spelling_mistakes_with_status_keyword(self):
+ """Test spelling mistakes with status keyword."""
+ expected_log = Log(
+ message='Could not start flux capacitor: pw::Rtatus=12',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Could not start flux capacitor: pw::Rtatus=12',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_spelling_mistakes_with_status_keyword_lowercase_s(self):
+ """Test spelling mistakes with status keyword."""
+ expected_log = Log(
+ message='Could not start flux capacitor: pw::status=13',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Could not start flux capacitor: pw::status=13',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_status_code_at_beginning_of_message(self):
+ """Test embedded status argument is found."""
+ expected_log = Log(
+ message='UNAVAILABLE to connect to server.',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'pw::Status=14 to connect to server.',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_status_code_in_the_middle_of_message(self):
+ """Test embedded status argument is found."""
+ expected_log = Log(
+ message='Connection error: UNAVAILABLE connecting to server.',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=(
+ b'Connection error: pw::Status=14 connecting to server.'
+ ),
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_status_code_with_no_surrounding_spaces(self):
+ """Test embedded status argument is found."""
+ expected_log = Log(
+ message='Connection error:UNAVAILABLEconnecting to server.',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Connection error:pw::Status=14connecting to server.',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_multiple_status_arguments_in_log_message(self):
+ """Test replacement of multiple status arguments into status string."""
+ expected_log = Log(
+ message='Connection error: UNAVAILABLE and PERMISSION_DENIED.',
+ file_and_line='my/path/file.cc:123',
+ level=logging.INFO,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ )
+ result = self.decoder.parse_log_entry_proto(
+ log_pb2.LogEntry(
+ message=b'Connection error: pw::Status=14 and pw::Status=7.',
+ file=b'my/path/file.cc',
+ line_level=Log.pack_line_level(123, logging.INFO),
+ )
+ )
+ self.assertEqual(
+ result,
+ expected_log,
+ msg='Status was not extracted from log message.',
+ )
+
+ def test_no_filename_in_message_parses_successfully(self):
+ """Test that if the file name is not present the log entry is parsed."""
+ thread_name = 'thread'
+
+ log_entry = log_pb2.LogEntry(
+ message=pw_tokenizer.encode.encode_token_and_args(
+ _MESSAGE_TOKEN_NO_FILENAME
+ ),
+ line_level=Log.pack_line_level(123, logging.DEBUG),
+ thread=bytes(thread_name.encode('utf-8')),
+ )
+ expected_log = Log(
+ message=_MESSAGE_NO_FILENAME,
+ file_and_line=':123',
+ level=logging.DEBUG,
+ source_name=self.decoder.source_name,
+ timestamp='0:00',
+ thread_name=thread_name,
+ )
+ result = self.decoder.parse_log_entry_proto(log_entry)
+ self.assertEqual(result, expected_log)
+
+ def test_log_decoded_log(self):
+ """Test that the logger correctly formats a decoded log."""
+ test_log = Log(
+ message="SampleMessage",
+ level=logging.DEBUG,
+ timestamp='1:30',
+ module_name="MyModule",
+ source_name="MySource",
+ thread_name="MyThread",
+ file_and_line='my_file.cc:123',
+ metadata_fields={'field1': 432, 'field2': 'value'},
+ )
+
+ class CapturingLogger(logging.Logger):
+ """Captures values passed to log().
+
+ Checks that calls to log() pass the correct level and extra arguments,
+ and that the message format string and arguments match.
+ """
+
+ @dataclass(frozen=True)
+ class LoggerLog:
+ """Represents a process log() call."""
+
+ level: int
+ message: str
+ kwargs: Any
+
+ def __init__(self):
+ super().__init__(name="CapturingLogger")
+ self.log_calls: List[CapturingLogger.LoggerLog] = []
+
+ def log(self, level, msg, *args, **kwargs) -> None:
+ log = CapturingLogger.LoggerLog(
+ level=level, message=msg % args, kwargs=kwargs
+ )
+ self.log_calls.append(log)
+
+ test_logger = CapturingLogger()
+ log_decoded_log(test_log, test_logger)
+ self.assertEqual(len(test_logger.log_calls), 1)
+ self.assertEqual(test_logger.log_calls[0].level, test_log.level)
+ self.assertEqual(
+ test_logger.log_calls[0].message,
+ '[%s] %s %s %s %s'
+ % (
+ test_log.source_name,
+ test_log.module_name,
+ test_log.timestamp,
+ test_log.message,
+ test_log.file_and_line,
+ ),
+ )
+ self.assertEqual(
+ test_logger.log_calls[0].kwargs['extra']['extra_metadata_fields'],
+ test_log.metadata_fields,
+ )
+
+
+class TestLogStreamDecoderLogDropDetectionFunctionality(
+ TestLogStreamDecoderBase
+):
+ """Tests LogStreamDecoder log drop detection functionality."""
+
+ def test_log_drops_transport_error(self):
+ """Tests log drops at transport."""
+ log_entry_in_log_entries_1 = 3
+ log_entries_1 = log_pb2.LogEntries(
+ first_entry_sequence_id=0,
+ entries=[
+ _create_random_log_entry()
+ for _ in range(log_entry_in_log_entries_1)
+ ],
+ )
+ self.decoder.parse_log_entries_proto(log_entries_1)
+ # Assume a second LogEntries batch (log_entries_2) containing 5 log
+ # entries was dropped before reaching the decoder.
+ log_entry_in_log_entries_2 = 5
+ log_entry_in_log_entries_3 = 3
+ log_entries_3 = log_pb2.LogEntries(
+ first_entry_sequence_id=log_entry_in_log_entries_2
+ + log_entry_in_log_entries_3,
+ entries=[
+ _create_random_log_entry()
+ for _ in range(log_entry_in_log_entries_3)
+ ],
+ )
+ self.decoder.parse_log_entries_proto(log_entries_3)
+
+ # The drop message is placed where the dropped logs were detected.
+ self.assertEqual(
+ len(self.captured_logs),
+ log_entry_in_log_entries_1 + 1 + log_entry_in_log_entries_3,
+ msg=(
+ 'Unexpected number of messages received: '
+ f'{self._captured_logs_as_str()}'
+ ),
+ )
+ self.assertEqual(
+ (
+ f'Dropped {log_entry_in_log_entries_2} logs due to '
+ f'{LogStreamDecoder.DROP_REASON_LOSS_AT_TRANSPORT}'
+ ),
+ self.captured_logs[log_entry_in_log_entries_1].message,
+ )
+
+ def test_log_drops_source_not_connected(self):
+ """Tests log drops when source of the logs was not connected."""
+ log_entry_in_log_entries = 4
+ drop_count = 7
+ log_entries = log_pb2.LogEntries(
+ first_entry_sequence_id=drop_count,
+ entries=[
+ _create_random_log_entry()
+ for _ in range(log_entry_in_log_entries)
+ ],
+ )
+ self.decoder.parse_log_entries_proto(log_entries)
+
+        # The drop message is placed where the log drops were detected.
+ self.assertEqual(
+ len(self.captured_logs),
+ 1 + log_entry_in_log_entries,
+ msg=(
+ 'Unexpected number of messages received: '
+ f'{self._captured_logs_as_str()}'
+ ),
+ )
+ self.assertEqual(
+ (
+ f'Dropped {drop_count} logs due to '
+ f'{LogStreamDecoder.DROP_REASON_SOURCE_NOT_CONNECTED}'
+ ),
+ self.captured_logs[0].message,
+ )
+
+ def test_log_drops_source_enqueue_failure_no_message(self):
+ """Tests log drops when source reports log drops."""
+ drop_count = 5
+ log_entries = log_pb2.LogEntries(
+ first_entry_sequence_id=0,
+ entries=[
+ _create_random_log_entry(),
+ _create_random_log_entry(),
+ _create_drop_count_message_log_entry(drop_count),
+ _create_random_log_entry(),
+ ],
+ )
+ self.decoder.parse_log_entries_proto(log_entries)
+
+        # The drop message is placed where the log drops were detected.
+ self.assertEqual(
+ len(self.captured_logs),
+ 4,
+ msg=(
+ 'Unexpected number of messages received: '
+ f'{self._captured_logs_as_str()}'
+ ),
+ )
+ self.assertEqual(
+ (
+ f'Dropped {drop_count} logs due to '
+ f'{LogStreamDecoder.DROP_REASON_SOURCE_ENQUEUE_FAILURE}'
+ ),
+ self.captured_logs[2].message,
+ )
+
+ def test_log_drops_source_enqueue_failure_with_message(self):
+ """Tests log drops when source reports log drops."""
+ drop_count = 8
+ reason = 'Flux Capacitor exploded'
+ log_entries = log_pb2.LogEntries(
+ first_entry_sequence_id=0,
+ entries=[
+ _create_random_log_entry(),
+ _create_random_log_entry(),
+ _create_drop_count_message_log_entry(drop_count, reason),
+ _create_random_log_entry(),
+ ],
+ )
+ self.decoder.parse_log_entries_proto(log_entries)
+
+        # The drop message is placed where the log drops were detected.
+ self.assertEqual(
+ len(self.captured_logs),
+ 4,
+ msg=(
+ 'Unexpected number of messages received: '
+ f'{self._captured_logs_as_str()}'
+ ),
+ )
+ self.assertEqual(
+ f'Dropped {drop_count} logs due to {reason.lower()}',
+ self.captured_logs[2].message,
+ )
+
+
+if __name__ == '__main__':
+ main()
diff --git a/pw_log/py/pw_log/__init__.py b/pw_log/py/pw_log/__init__.py
new file mode 100644
index 000000000..c3e1bdbd5
--- /dev/null
+++ b/pw_log/py/pw_log/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2023 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
diff --git a/pw_log/py/pw_log/log_decoder.py b/pw_log/py/pw_log/log_decoder.py
new file mode 100644
index 000000000..c50846e50
--- /dev/null
+++ b/pw_log/py/pw_log/log_decoder.py
@@ -0,0 +1,413 @@
+# Copyright 2023 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+"""Utils to decode logs."""
+
+from dataclasses import dataclass
+import datetime
+import logging
+import re
+from typing import Any, Callable, Dict, Optional
+
+from pw_log.proto import log_pb2
+import pw_log_tokenized
+import pw_status
+from pw_tokenizer import Detokenizer
+from pw_tokenizer.proto import decode_optionally_tokenized
+
+_LOG = logging.getLogger(__name__)
+
+
+@dataclass(frozen=True)
+class LogLineLevel:
+ """Tuple of line number and level packed in LogEntry."""
+
+ line: int
+ level: int
+
+
+class Log:
+ """A decoded, human-readable representation of a LogEntry.
+
+ Contains fields to represent a decoded pw_log/log.proto LogEntry message in
+ a human readable way.
+
+ Attributes:
+ message: The log message as a string.
+        level: An integer representing the log level; follows Python logging
+          levels.
+ flags: An integer with the bit flags.
+ timestamp: A string representation of a timestamp.
+ module_name: The module name as a string.
+ thread_name: The thread name as a string.
+ source_name: The source name as a string.
+ file_and_line: The filepath and line as a string.
+ metadata_fields: Extra fields with string-string mapping.
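+
+    Example (construction with illustrative values):
+
+        Log(message='Hello', level=logging.INFO, timestamp='0:01',
+            module_name='MOD', file_and_line='main.cc:12')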
+ """
+
+ _LOG_LEVEL_NAMES = {
+ logging.DEBUG: 'DBG',
+ logging.INFO: 'INF',
+ logging.WARNING: 'WRN',
+ logging.ERROR: 'ERR',
+ logging.CRITICAL: 'CRT',
+ # The logging module does not have a FATAL level. This module's log
+ # level values are 10 * PW_LOG_LEVEL values. PW_LOG_LEVEL_FATAL is 7.
+ 70: 'FTL',
+ }
+ _LEVEL_MASK = 0x7 # pylint: disable=C0103
+ _LINE_OFFSET = 3 # pylint: disable=C0103
+
+ def __init__(
+ self,
+ message: str = '',
+ level: int = logging.NOTSET,
+ flags: int = 0,
+ timestamp: str = '',
+ module_name: str = '',
+ thread_name: str = '',
+ source_name: str = '',
+ file_and_line: str = '',
+ metadata_fields: Optional[Dict[str, str]] = None,
+ ) -> None:
+ self.message = message
+ self.level = level # Value from logging levels.
+ self.flags = flags
+ self.timestamp = timestamp # A human readable value.
+ self.module_name = module_name
+ self.thread_name = thread_name
+ self.source_name = source_name
+ self.file_and_line = file_and_line
+ self.metadata_fields = dict()
+ if metadata_fields:
+ self.metadata_fields.update(metadata_fields)
+
+ def __eq__(self, other: Any) -> bool:
+ if not isinstance(other, Log):
+ return False
+
+ return (
+ self.message == other.message
+ and self.level == other.level
+ and self.flags == other.flags
+ and self.timestamp == other.timestamp
+ and self.module_name == other.module_name
+ and self.thread_name == other.thread_name
+ and self.source_name == other.source_name
+ and self.file_and_line == other.file_and_line
+ and self.metadata_fields == other.metadata_fields
+ )
+
+ def __repr__(self) -> str:
+ return self.__str__()
+
+ def __str__(self) -> str:
+ level_name = self._LOG_LEVEL_NAMES.get(self.level, '')
+ metadata = ' '.join(map(str, self.metadata_fields.values()))
+ return (
+ f'{level_name} [{self.source_name}] {self.module_name} '
+ f'{self.timestamp} {self.message} {self.file_and_line} '
+ f'{metadata}'
+ ).strip()
+
+ @staticmethod
+ def pack_line_level(line: int, logging_log_level: int) -> int:
+ """Packs the line and level values into an integer as done in LogEntry.
+
+ Args:
+ line: the line number
+            logging_log_level: the logging level using Python logging levels.
+ Returns:
+ An integer with the two values bitpacked.
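+
+        For example, packing line 123 at logging.DEBUG (pw_log level 1)
+        yields (123 << 3) | 1 == 985, and unpack_line_level(985) recovers
+        both values.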
+ """
+ return (line << Log._LINE_OFFSET) | (
+ Log.logging_level_to_pw_log_level(logging_log_level)
+ & Log._LEVEL_MASK
+ )
+
+ @staticmethod
+ def unpack_line_level(packed_line_level: int) -> LogLineLevel:
+ """Unpacks the line and level values packed as done in LogEntry.
+
+        Args:
+            packed_line_level: An integer with the line and level bitpacked.
+        Returns:
+            A LogLineLevel with the line number and the level as a Python
+            logging level.
+ """
+ line = packed_line_level >> Log._LINE_OFFSET
+ # Convert to logging module level number.
+ level = Log.pw_log_level_to_logging_level(
+ packed_line_level & Log._LEVEL_MASK
+ )
+ return LogLineLevel(line=line, level=level)
+
+ @staticmethod
+ def pw_log_level_to_logging_level(pw_log_level: int) -> int:
+ """Maps a pw_log/levels.h value to Python logging level value."""
+ return pw_log_level * 10
+
+ @staticmethod
+ def logging_level_to_pw_log_level(logging_log_level: int) -> int:
+ """Maps a Python logging level value to a pw_log/levels.h value."""
+ return int(logging_log_level / 10)
+
+
+def pw_status_code_to_name(
+ message: str, status_pattern: str = 'pw::Status=([0-9]+)'
+) -> str:
+ """Replaces the pw::Status code number with the status name.
+
+ Searches for a matching pattern encapsulating the status code number and
+ replaces it with the status name. This is useful when logging the status
+ code instead of the string to save space.
+
+ Args:
+ message: The string to look for the status code.
+ status_pattern: The regex pattern describing the format encapsulating
+ the status code.
+ Returns:
+ The message string with the status name if found, else the original
+ string.
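+
+    For example, 'Write failed: pw::Status=5' would be returned as
+    'Write failed: NOT_FOUND'.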
+ """
+ # Min and max values for Status.
+ max_status_value = max(status.value for status in pw_status.Status)
+ min_status_value = min(status.value for status in pw_status.Status)
+
+ def replacement_callback(match: re.Match) -> str:
+ status_code = int(match.group(1))
+ if min_status_value <= status_code <= max_status_value:
+ return pw_status.Status(status_code).name
+ return match.group(0)
+
+ return re.sub(
+ pattern=status_pattern, repl=replacement_callback, string=message
+ )
+
+
+def log_decoded_log(log: Log, logger: logging.Logger) -> None:
+ """Formats and saves the log information in a pw console compatible way.
+
+    Args:
+        log: The decoded Log whose contents are emitted.
+        logger: The logger to emit the log information to.
+ """
+ # Fields used for pw console table view.
+ log.metadata_fields['module'] = log.module_name
+ log.metadata_fields['source_name'] = log.source_name
+ log.metadata_fields['timestamp'] = log.timestamp
+ log.metadata_fields['msg'] = log.message
+ log.metadata_fields['file'] = log.file_and_line
+
+ logger.log(
+ log.level,
+ '[%s] %s %s %s %s',
+ log.source_name,
+ log.module_name,
+ log.timestamp,
+ log.message,
+ log.file_and_line,
+ extra=dict(extra_metadata_fields=log.metadata_fields),
+ )
+
+
+def timestamp_parser_ns_since_boot(timestamp: int) -> str:
+ """Decodes timestamp as nanoseconds since boot.
+
+ Args:
+ timestamp: The timestamp as an integer.
+ Returns:
+ A string representation of the timestamp.
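+
+    For example, a timestamp of 1_500_000_000 (1.5 seconds after boot) is
+    formatted as '0:00:01.500'.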
+ """
+ return str(datetime.timedelta(seconds=timestamp / 1e9))[:-3]
+
+
+class LogStreamDecoder:
+ """Decodes an RPC stream of LogEntries packets.
+
+ Performs log drop detection on the stream of LogEntries proto messages.
+
+ Args:
+ decoded_log_handler: Callback called on each decoded log.
+        detokenizer: If provided, detokenizes log messages that are tokenized.
+        source_name: Optional string to identify the source of the logs.
+        timestamp_parser: Optional parser that converts a timestamp number to
+          a string.
+ message_parser: Optional message parser called after detokenization is
+ attempted on a log message.
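+
+    Example (a minimal sketch; the handler and the incoming proto are
+    placeholders):
+
+        decoder = LogStreamDecoder(
+            decoded_log_handler=lambda log: print(log),
+            source_name='device',
+            timestamp_parser=timestamp_parser_ns_since_boot,
+        )
+        decoder.parse_log_entries_proto(received_log_entries_proto)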
+ """
+
+ DROP_REASON_LOSS_AT_TRANSPORT = 'loss at transport'
+ DROP_REASON_SOURCE_NOT_CONNECTED = 'source not connected'
+ DROP_REASON_SOURCE_ENQUEUE_FAILURE = 'enqueue failure at source'
+
+ def __init__(
+ self,
+ decoded_log_handler: Callable[[Log], None],
+ detokenizer: Optional[Detokenizer] = None,
+ source_name: str = '',
+ timestamp_parser: Optional[Callable[[int], str]] = None,
+ message_parser: Optional[Callable[[str], str]] = None,
+ ):
+ self.decoded_log_handler = decoded_log_handler
+ self.detokenizer = detokenizer
+ self.source_name = source_name
+ self.timestamp_parser = timestamp_parser
+ self.message_parser = message_parser
+ self._expected_log_sequence_id = 0
+
+ def parse_log_entries_proto(
+ self, log_entries_proto: log_pb2.LogEntries
+ ) -> None:
+ """Parses each LogEntry in log_entries_proto.
+
+        Args:
+            log_entries_proto: A LogEntries message proto. Each decoded log
+              is passed to decoded_log_handler.
+ """
+ has_received_logs = self._expected_log_sequence_id > 0
+ dropped_log_count = self._calculate_dropped_logs(log_entries_proto)
+ if dropped_log_count > 0:
+ reason = (
+ self.DROP_REASON_LOSS_AT_TRANSPORT
+ if has_received_logs
+ else self.DROP_REASON_SOURCE_NOT_CONNECTED
+ )
+ self.decoded_log_handler(
+ self._handle_log_drop_count(dropped_log_count, reason)
+ )
+ elif dropped_log_count < 0:
+ _LOG.error('Log sequence ID is smaller than expected')
+
+ for i, log_entry_proto in enumerate(log_entries_proto.entries):
+ # Handle dropped count first.
+ if log_entry_proto.dropped:
+                # Avoid duplicating drop reports: after a transmission
+                # failure the device reports the drop count of the last
+                # attempted transmission only, in the first entry of the
+                # next successful transmission.
+ if i == 0 and dropped_log_count >= log_entry_proto.dropped:
+ continue
+ parsed_log = self.parse_log_entry_proto(log_entry_proto)
+ self.decoded_log_handler(parsed_log)
+
+ def parse_log_entry_proto(self, log_entry_proto: log_pb2.LogEntry) -> Log:
+ """Parses the log_entry_proto contents into a human readable format.
+
+ Args:
+ log_entry_proto: A LogEntry message proto.
+ Returns:
+ A Log object with the decoded log_entry_proto.
+ """
+ detokenized_message = self._decode_optionally_tokenized_field(
+ log_entry_proto.message
+ )
+ # Handle dropped count first.
+ if log_entry_proto.dropped:
+ drop_reason = self.DROP_REASON_SOURCE_ENQUEUE_FAILURE
+ if detokenized_message:
+ drop_reason = detokenized_message.lower()
+ return self._handle_log_drop_count(
+ log_entry_proto.dropped, drop_reason
+ )
+
+ # Parse message and metadata, if any, encoded in a key-value format as
+ # described in pw_log/log.proto LogEntry::message field.
+ message_and_metadata = pw_log_tokenized.FormatStringWithMetadata(
+ detokenized_message
+ )
+ module_name = self._decode_optionally_tokenized_field(
+ log_entry_proto.module
+ )
+ if not module_name:
+ module_name = message_and_metadata.module
+
+ line_level_tuple = Log.unpack_line_level(log_entry_proto.line_level)
+ file_and_line = self._decode_optionally_tokenized_field(
+ log_entry_proto.file
+ )
+ if not file_and_line:
+ # Set file name if found in the metadata.
+ if message_and_metadata.file is not None:
+ file_and_line = message_and_metadata.file
+ else:
+ file_and_line = ''
+
+ # Add line number to filepath if needed.
+ if line_level_tuple.line and ':' not in file_and_line:
+ file_and_line += f':{line_level_tuple.line}'
+ # Add extra log information avoiding duplicated data.
+ metadata_fields = {
+ k: v
+ for k, v in message_and_metadata.fields.items()
+ if k not in ['file', 'module', 'msg']
+ }
+
+ message = message_and_metadata.message
+ if self.message_parser:
+ message = self.message_parser(message)
+ if self.timestamp_parser:
+ timestamp = self.timestamp_parser(log_entry_proto.timestamp)
+ else:
+ timestamp = str(log_entry_proto.timestamp)
+ log = Log(
+ message=message,
+ level=line_level_tuple.level,
+ flags=log_entry_proto.flags,
+ timestamp=timestamp,
+ module_name=module_name,
+ thread_name=self._decode_optionally_tokenized_field(
+ log_entry_proto.thread
+ ),
+ source_name=self.source_name,
+ file_and_line=file_and_line,
+ metadata_fields=metadata_fields,
+ )
+
+ return log
+
+ def _handle_log_drop_count(self, drop_count: int, reason: str) -> Log:
+ log_word = 'logs' if drop_count > 1 else 'log'
+ log = Log(
+ message=f'Dropped {drop_count} {log_word} due to {reason}',
+ level=logging.WARNING,
+ source_name=self.source_name,
+ )
+ return log
+
+ def _calculate_dropped_logs(
+ self, log_entries_proto: log_pb2.LogEntries
+ ) -> int:
+ # Count log messages received that don't use the dropped field.
+ messages_received = sum(
+ 1 if not log_proto.dropped else 0
+ for log_proto in log_entries_proto.entries
+ )
+ dropped_log_count = (
+ log_entries_proto.first_entry_sequence_id
+ - self._expected_log_sequence_id
+ )
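+        # For example, if 3 entries (sequence IDs 0-2) were received before,
+        # the expected sequence ID is 3; a batch arriving with
+        # first_entry_sequence_id == 8 means 5 logs were dropped in between.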
+ self._expected_log_sequence_id = (
+ log_entries_proto.first_entry_sequence_id + messages_received
+ )
+ return dropped_log_count
+
+ def _decode_optionally_tokenized_field(self, field: bytes) -> str:
+        """Decodes an optionally tokenized field into a printable string.
+
+        Args:
+            field: The field bytes, which may or may not be tokenized.
+        Returns:
+            A printable string.
+ """
+ if self.detokenizer:
+ return decode_optionally_tokenized(self.detokenizer, field)
+ return field.decode('utf-8')
diff --git a/pw_log/py/pyproject.toml b/pw_log/py/pyproject.toml
new file mode 100644
index 000000000..78668a709
--- /dev/null
+++ b/pw_log/py/pyproject.toml
@@ -0,0 +1,16 @@
+# Copyright 2023 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+[build-system]
+requires = ['setuptools', 'wheel']
+build-backend = 'setuptools.build_meta'
diff --git a/pw_log/py/setup.cfg b/pw_log/py/setup.cfg
new file mode 100644
index 000000000..e157974ad
--- /dev/null
+++ b/pw_log/py/setup.cfg
@@ -0,0 +1,25 @@
+# Copyright 2023 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+[metadata]
+name = pw_log
+version = 0.0.1
+author = Pigweed Authors
+author_email = pigweed-developers@googlegroups.com
+description = Tools for working with log protos
+
+[options]
+packages = pw_log
+
+[options.package_data]
+pw_log = py.typed
diff --git a/pw_log/tokenized_args.rst b/pw_log/tokenized_args.rst
new file mode 100644
index 000000000..d88910579
--- /dev/null
+++ b/pw_log/tokenized_args.rst
@@ -0,0 +1,76 @@
+.. _module-pw_log-tokenized-args:
+
+-----------------------
+Tokenized log arguments
+-----------------------
+The ``pw_log`` facade is intended to make the logging backend invisible to the
+user, but some backend features require additional frontend support,
+necessitating a break in the abstraction. One of these features is the logging
+of nested token arguments in tokenized logs, because only the user is able to
+know which log arguments can be tokens (see :ref:`module-pw_tokenizer` for
+context on tokenization, and :ref:`module-pw_log_tokenized` for an example of
+a tokenized logging backend). Arguments that have already been tokenized are
+just unsigned integers, and not all strings can be compile-time constants, so
+users must be provided with a way of manually marking token arguments.
+
+To this end, ``pw_log/tokenized_args.h`` aliases macros from ``pw_tokenizer``
+to enable logging nested tokens when the active backend uses tokenization.
+These alias macros revert to plain string logging otherwise, allowing projects
+to take advantage of nested token logging without breaking readable logs when
+the project is built with a different logging backend. To support logging
+nested token arguments, a ``pw_log`` backend must add an empty file
+``log_backend_uses_pw_tokenizer.h`` under ``public_overrides/pw_log_backend/``.
+
+Although the detokenizing backend accepts several different numeric bases, the
+macros currently only support formatting nested tokens in hexadecimal, which
+affects how the arguments appear in final logs if they cannot be detokenized
+for any reason. Undetokenized tokens will appear inline as hex integers
+prefixed with ``$#``, e.g. ``$#34d16466``.
+
+.. doxygendefine:: PW_LOG_TOKEN_TYPE
+.. doxygendefine:: PW_LOG_TOKEN
+.. doxygendefine:: PW_LOG_TOKEN_EXPR
+.. doxygendefine:: PW_LOG_TOKEN_FMT
+
+Example usage with inline string arguments:
+
+.. code-block:: cpp
+
+ #include "pw_log/log.h"
+ #include "pw_log/tokenized_args.h"
+
+ // bool active_
+ PW_LOG_INFO("Component is " PW_LOG_TOKEN_FMT(),
+ active_ ? PW_LOG_TOKEN_EXPR("active")
+ : PW_LOG_TOKEN_EXPR("idle"));
+
+Example usage with enums:
+
+.. code-block:: cpp
+
+ #include "pw_log/log.h"
+ #include "pw_log/tokenized_args.h"
+
+ namespace foo {
+
+ enum class Color { kRed, kGreen, kBlue };
+
+ PW_LOG_TOKEN_TYPE ColorToToken(Color color) {
+ switch (color) {
+ case Color::kRed:
+ return PW_LOG_TOKEN_EXPR("kRed");
+ case Color::kGreen:
+ return PW_LOG_TOKEN_EXPR("kGreen");
+ case Color::kBlue:
+ return PW_LOG_TOKEN_EXPR("kBlue");
+ default:
+ return PW_LOG_TOKEN_EXPR("kUnknown");
+ }
+ }
+
+ } // namespace foo
+
+ void LogColor(foo::Color color) {
+     PW_LOG_INFO("Color: [" PW_LOG_TOKEN_FMT() "]", foo::ColorToToken(color));
+ }
+