aboutsummaryrefslogtreecommitdiff
path: root/pw_log_tokenized
diff options
context:
space:
mode:
Diffstat (limited to 'pw_log_tokenized')
-rw-r--r--pw_log_tokenized/BUILD.bazel41
-rw-r--r--pw_log_tokenized/BUILD.gn95
-rw-r--r--pw_log_tokenized/CMakeLists.txt61
-rw-r--r--pw_log_tokenized/backend.cmake19
-rw-r--r--pw_log_tokenized/backend.gni18
-rw-r--r--pw_log_tokenized/base64_over_hdlc.cc13
-rw-r--r--pw_log_tokenized/compatibility.cc60
-rw-r--r--pw_log_tokenized/docs.rst77
-rw-r--r--pw_log_tokenized/log_tokenized.cc34
-rw-r--r--pw_log_tokenized/log_tokenized_test.cc45
-rw-r--r--pw_log_tokenized/log_tokenized_test_c.c16
-rw-r--r--pw_log_tokenized/metadata_test.cc49
-rw-r--r--pw_log_tokenized/public/pw_log_tokenized/config.h12
-rw-r--r--pw_log_tokenized/public/pw_log_tokenized/handler.h31
-rw-r--r--pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h53
-rw-r--r--pw_log_tokenized/public/pw_log_tokenized/metadata.h76
-rw-r--r--pw_log_tokenized/py/BUILD.bazel34
-rw-r--r--pw_log_tokenized/py/BUILD.gn1
-rw-r--r--pw_log_tokenized/py/format_string_test.py4
-rw-r--r--pw_log_tokenized/py/metadata_test.py21
-rw-r--r--pw_log_tokenized/py/pw_log_tokenized/__init__.py49
21 files changed, 632 insertions, 177 deletions
diff --git a/pw_log_tokenized/BUILD.bazel b/pw_log_tokenized/BUILD.bazel
index 5ac34a618..abcf0f1de 100644
--- a/pw_log_tokenized/BUILD.bazel
+++ b/pw_log_tokenized/BUILD.bazel
@@ -35,27 +35,64 @@ pw_cc_library(
"public_overrides",
],
deps = [
+ "//pw_log:facade",
"//pw_tokenizer",
],
)
pw_cc_library(
name = "pw_log_tokenized",
+ srcs = ["log_tokenized.cc"],
deps = [
+ ":handler",
":headers",
"//pw_log:facade",
],
)
pw_cc_library(
+ name = "handler_facade",
+ hdrs = ["public/pw_log_tokenized/handler.h"],
+ includes = ["public"],
+ deps = ["//pw_preprocessor"],
+)
+
+pw_cc_library(
+ name = "handler",
+ deps = [
+ ":handler_facade",
+ "@pigweed_config//:pw_log_tokenized_handler_backend",
+ ],
+)
+
+# There is no default backend for now.
+pw_cc_library(
+ name = "backend_multiplexer",
+ visibility = ["@pigweed_config//:__pkg__"],
+)
+
+# Backwards compatibility layer for the deprecated pw_tokenizer API; private.
+pw_cc_library(
+ name = "compatibility",
+ srcs = ["compatibility.cc"],
+ visibility = ["//visibility:private"],
+ deps = [
+ ":handler_facade",
+ "//pw_tokenizer",
+ "//pw_tokenizer:global_handler_with_payload",
+ ],
+)
+
+pw_cc_library(
name = "base64_over_hdlc",
srcs = ["base64_over_hdlc.cc"],
hdrs = ["public/pw_log_tokenized/base64_over_hdlc.h"],
includes = ["public"],
deps = [
- "//pw_hdlc:encoder",
+ ":handler_facade",
+ "//pw_hdlc",
+ "//pw_stream:sys_io_stream",
"//pw_tokenizer:base64",
- "//pw_tokenizer:global_handler_with_payload.facade",
],
)
diff --git a/pw_log_tokenized/BUILD.gn b/pw_log_tokenized/BUILD.gn
index 21096734f..3e2b16181 100644
--- a/pw_log_tokenized/BUILD.gn
+++ b/pw_log_tokenized/BUILD.gn
@@ -14,10 +14,12 @@
import("//build_overrides/pigweed.gni")
+import("$dir_pw_build/facade.gni")
import("$dir_pw_build/module_config.gni")
import("$dir_pw_build/target_types.gni")
import("$dir_pw_docgen/docs.gni")
import("$dir_pw_log/backend.gni")
+import("$dir_pw_log_tokenized/backend.gni")
import("$dir_pw_tokenizer/backend.gni")
import("$dir_pw_unit_test/test.gni")
@@ -40,19 +42,84 @@ config("backend_config") {
# This target provides the backend for pw_log.
pw_source_set("pw_log_tokenized") {
- public_configs = [
- ":backend_config",
- ":public_include_path",
+ public_configs = [ ":backend_config" ]
+ public_deps = [
+ ":handler.facade", # Depend on the facade to avoid circular dependencies.
+ ":headers",
]
+ public = [ "public_overrides/pw_log_backend/log_backend.h" ]
+
+ sources = [ "log_tokenized.cc" ]
+}
+
+config("backwards_compatibility_config") {
+ defines = [ "_PW_LOG_TOKENIZED_GLOBAL_HANDLER_BACKWARDS_COMPAT" ]
+ visibility = [ ":*" ]
+}
+
+pw_source_set("headers") {
+ visibility = [ ":*" ]
+ public_configs = [ ":public_include_path" ]
public_deps = [
":config",
":metadata",
- "$dir_pw_tokenizer:global_handler_with_payload.facade",
+
+ # TODO(hepler): Remove this dependency when all projects have migrated to
+ # the new pw_log_tokenized handler.
+ "$dir_pw_tokenizer:global_handler_with_payload",
+ dir_pw_preprocessor,
+ dir_pw_tokenizer,
]
- public = [
- "public/pw_log_tokenized/log_tokenized.h",
- "public_overrides/pw_log_backend/log_backend.h",
+ public = [ "public/pw_log_tokenized/log_tokenized.h" ]
+}
+
+# The old pw_tokenizer_GLOBAL_HANDLER_WITH_PAYLOAD_BACKEND backend may still be
+# in use by projects that have not switched to the new pw_log_tokenized facade.
+# Use the old backend as a stand-in for the new backend if it is set.
+_old_backend_is_set = pw_tokenizer_GLOBAL_HANDLER_WITH_PAYLOAD_BACKEND != ""
+_new_backend_is_set = pw_log_tokenized_HANDLER_BACKEND != ""
+
+pw_facade("handler") {
+ public_configs = [ ":public_include_path" ]
+ public_deps = [
+ # TODO(hepler): Remove this dependency when all projects have migrated to
+ # the new pw_log_tokenized handler.
+ "$dir_pw_tokenizer:global_handler_with_payload",
+ dir_pw_preprocessor,
]
+
+ public = [ "public/pw_log_tokenized/handler.h" ]
+
+ # If the global handler backend is set, redirect the new facade to the old
+ # facade. If no backend is set, the old facade may still be in use through
+ # link deps, so provide the compatibility layer.
+ #
+ # TODO(hepler): Remove these backwards compatibility workarounds when projects
+ # have migrated.
+ if (_old_backend_is_set || (!_old_backend_is_set && !_new_backend_is_set)) {
+ assert(pw_log_tokenized_HANDLER_BACKEND == "",
+ "pw_tokenizer_GLOBAL_HANDLER_WITH_PAYLOAD_BACKEND is deprecated; " +
+ "only pw_log_tokenized_HANDLER_BACKEND should be set")
+
+ backend = pw_tokenizer_GLOBAL_HANDLER_WITH_PAYLOAD_BACKEND
+
+ # There is only one pw_log_tokenized backend in Pigweed, and it has been
+ # updated to the new API.
+ if (_old_backend_is_set &&
+ get_label_info(pw_tokenizer_GLOBAL_HANDLER_WITH_PAYLOAD_BACKEND,
+ "label_no_toolchain") ==
+ get_label_info(":base64_over_hdlc", "label_no_toolchain")) {
+ defines = [ "PW_LOG_TOKENIZED_BACKEND_USES_NEW_API=1" ]
+ } else {
+ defines = [ "PW_LOG_TOKENIZED_BACKEND_USES_NEW_API=0" ]
+ }
+
+ public_configs += [ ":backwards_compatibility_config" ]
+ deps = [ dir_pw_tokenizer ]
+ sources = [ "compatibility.cc" ]
+ } else {
+ backend = pw_log_tokenized_HANDLER_BACKEND
+ }
}
pw_source_set("metadata") {
@@ -74,10 +141,11 @@ pw_source_set("config") {
# pw_log is so ubiquitous. These deps are kept separate so they can be
# depended on from elsewhere.
pw_source_set("pw_log_tokenized.impl") {
- deps = [
- ":pw_log_tokenized",
- "$dir_pw_tokenizer:global_handler_with_payload",
- ]
+ deps = [ ":pw_log_tokenized" ]
+
+ if (_new_backend_is_set || _old_backend_is_set) {
+ deps += [ ":handler" ]
+ }
}
# This target provides a backend for pw_tokenizer that encodes tokenized logs as
@@ -87,10 +155,11 @@ pw_source_set("base64_over_hdlc") {
public = [ "public/pw_log_tokenized/base64_over_hdlc.h" ]
sources = [ "base64_over_hdlc.cc" ]
deps = [
+ ":handler.facade",
"$dir_pw_hdlc:encoder",
"$dir_pw_stream:sys_io_stream",
"$dir_pw_tokenizer:base64",
- "$dir_pw_tokenizer:global_handler_with_payload.facade",
+ dir_pw_span,
]
}
@@ -108,7 +177,7 @@ pw_test("log_tokenized_test") {
"pw_log_tokenized_private/test_utils.h",
]
deps = [
- ":pw_log_tokenized",
+ ":headers",
dir_pw_preprocessor,
]
}
diff --git a/pw_log_tokenized/CMakeLists.txt b/pw_log_tokenized/CMakeLists.txt
index d051f3b2a..28b3c96a9 100644
--- a/pw_log_tokenized/CMakeLists.txt
+++ b/pw_log_tokenized/CMakeLists.txt
@@ -13,10 +13,11 @@
# the License.
include($ENV{PW_ROOT}/pw_build/pigweed.cmake)
+include($ENV{PW_ROOT}/pw_log_tokenized/backend.cmake)
pw_add_module_config(pw_log_tokenized_CONFIG)
-pw_add_module_library(pw_log_tokenized.config
+pw_add_library(pw_log_tokenized.config INTERFACE
HEADERS
public/pw_log_tokenized/config.h
PUBLIC_INCLUDES
@@ -26,37 +27,45 @@ pw_add_module_library(pw_log_tokenized.config
${pw_log_tokenized_CONFIG}
)
-pw_add_module_library(pw_log_tokenized
- IMPLEMENTS_FACADES
- pw_log
+pw_add_library(pw_log_tokenized STATIC
HEADERS
public/pw_log_tokenized/log_tokenized.h
public_overrides/pw_log_backend/log_backend.h
PUBLIC_INCLUDES
public
+ public_overrides
PUBLIC_DEPS
pw_log_tokenized.config
+ pw_log_tokenized.handler
pw_log_tokenized.metadata
pw_tokenizer
- PRIVATE_DEPS
- pw_tokenizer.global_handler_with_payload
+ SOURCES
+ log_tokenized.cc
)
-pw_add_module_library(pw_log_tokenized.metadata
+pw_add_library(pw_log_tokenized.metadata INTERFACE
HEADERS
public/pw_log_tokenized/metadata.h
PUBLIC_INCLUDES
public
PUBLIC_DEPS
- pw_log.facade
pw_log_tokenized.config
)
+pw_add_facade(pw_log_tokenized.handler INTERFACE
+ BACKEND
+ pw_log_tokenized.handler_BACKEND
+ HEADERS
+ public/pw_log_tokenized/handler.h
+ PUBLIC_INCLUDES
+ public
+ PUBLIC_DEPS
+ pw_preprocessor
+)
+
# This target provides a backend for pw_tokenizer that encodes tokenized logs as
# Base64, encodes them into HDLC frames, and writes them over sys_io.
-pw_add_module_library(pw_log_tokenized.base64_over_hdlc
- IMPLEMENTS_FACADES
- pw_tokenizer.global_handler_with_payload
+pw_add_library(pw_log_tokenized.base64_over_hdlc STATIC
HEADERS
public/pw_log_tokenized/base64_over_hdlc.h
PUBLIC_INCLUDES
@@ -65,27 +74,31 @@ pw_add_module_library(pw_log_tokenized.base64_over_hdlc
base64_over_hdlc.cc
PRIVATE_DEPS
pw_hdlc.encoder
+ pw_log_tokenized.handler
+ pw_span
pw_stream.sys_io_stream
pw_tokenizer.base64
)
-pw_add_test(pw_log_tokenized.log_tokenized_test
- SOURCES
- log_tokenized_test.cc
- log_tokenized_test_c.c
- pw_log_tokenized_private/test_utils.h
- DEPS
- pw_log_tokenized
- pw_preprocessor
- GROUPS
- modules
- pw_log_tokenized
-)
+if(NOT "${pw_tokenizer.global_handler_with_payload_BACKEND}" STREQUAL "")
+ pw_add_test(pw_log_tokenized.log_tokenized_test
+ SOURCES
+ log_tokenized_test.cc
+ log_tokenized_test_c.c
+ pw_log_tokenized_private/test_utils.h
+ PRIVATE_DEPS
+ pw_log_tokenized
+ pw_preprocessor
+ GROUPS
+ modules
+ pw_log_tokenized
+ )
+endif()
pw_add_test(pw_log_tokenized.metadata_test
SOURCES
metadata_test.cc
- DEPS
+ PRIVATE_DEPS
pw_log_tokenized.metadata
GROUPS
modules
diff --git a/pw_log_tokenized/backend.cmake b/pw_log_tokenized/backend.cmake
new file mode 100644
index 000000000..5218566b9
--- /dev/null
+++ b/pw_log_tokenized/backend.cmake
@@ -0,0 +1,19 @@
+# Copyright 2023 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+include_guard(GLOBAL)
+
+include($ENV{PW_ROOT}/pw_build/pigweed.cmake)
+
+# Backend for the pw_log_tokenized handler.
+pw_add_backend_variable(pw_log_tokenized.handler_BACKEND)
diff --git a/pw_log_tokenized/backend.gni b/pw_log_tokenized/backend.gni
new file mode 100644
index 000000000..afa71ed51
--- /dev/null
+++ b/pw_log_tokenized/backend.gni
@@ -0,0 +1,18 @@
+# Copyright 2023 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+declare_args() {
+ # Backend for the pw_log_tokenized log handler.
+ pw_log_tokenized_HANDLER_BACKEND = ""
+}
diff --git a/pw_log_tokenized/base64_over_hdlc.cc b/pw_log_tokenized/base64_over_hdlc.cc
index f7dc0e0fe..29e925ab7 100644
--- a/pw_log_tokenized/base64_over_hdlc.cc
+++ b/pw_log_tokenized/base64_over_hdlc.cc
@@ -17,12 +17,11 @@
#include "pw_log_tokenized/base64_over_hdlc.h"
-#include <span>
-
#include "pw_hdlc/encoder.h"
+#include "pw_log_tokenized/handler.h"
+#include "pw_span/span.h"
#include "pw_stream/sys_io_stream.h"
#include "pw_tokenizer/base64.h"
-#include "pw_tokenizer/tokenize_to_global_handler_with_payload.h"
namespace pw::log_tokenized {
namespace {
@@ -32,19 +31,19 @@ stream::SysIoWriter writer;
} // namespace
// Base64-encodes tokenized logs and writes them to pw::sys_io as HDLC frames.
-extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
- pw_tokenizer_Payload, // TODO(hepler): Use the metadata for filtering.
+extern "C" void pw_log_tokenized_HandleLog(
+ uint32_t, // TODO(hepler): Use the metadata for filtering.
const uint8_t log_buffer[],
size_t size_bytes) {
// Encode the tokenized message as Base64.
char base64_buffer[tokenizer::kDefaultBase64EncodedBufferSize];
const size_t base64_bytes = tokenizer::PrefixedBase64Encode(
- std::span(log_buffer, size_bytes), base64_buffer);
+ span(log_buffer, size_bytes), base64_buffer);
base64_buffer[base64_bytes] = '\0';
// HDLC-encode the Base64 string via a SysIoWriter.
hdlc::WriteUIFrame(PW_LOG_TOKENIZED_BASE64_LOG_HDLC_ADDRESS,
- std::as_bytes(std::span(base64_buffer, base64_bytes)),
+ as_bytes(span(base64_buffer, base64_bytes)),
writer);
}
diff --git a/pw_log_tokenized/compatibility.cc b/pw_log_tokenized/compatibility.cc
new file mode 100644
index 000000000..54060dbc7
--- /dev/null
+++ b/pw_log_tokenized/compatibility.cc
@@ -0,0 +1,60 @@
+// Copyright 2023 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+// If the project is still using pw_tokenizer's global handler with payload
+// facade, then define its functions as used by pw_log_tokenized.
+
+#include <cstdarg>
+
+#include "pw_log_tokenized/handler.h"
+#include "pw_preprocessor/compiler.h"
+#include "pw_tokenizer/encode_args.h"
+#include "pw_tokenizer/tokenize_to_global_handler_with_payload.h"
+
+// If the new API is in use, define pw_tokenizer_HandleEncodedMessageWithPayload
+// to redirect to it, in case there are any direct calls to it. Only projects
+// that use the base64_over_hdlc backend will have been updated to the new API.
+#if PW_LOG_TOKENIZED_BACKEND_USES_NEW_API
+
+extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
+ uint32_t metadata, const uint8_t encoded_message[], size_t size_bytes) {
+ pw_log_tokenized_HandleLog(metadata, encoded_message, size_bytes);
+}
+
+#else // If the new API is not in use, implement it to redirect to the old API.
+
+extern "C" void pw_log_tokenized_HandleLog(uint32_t metadata,
+ const uint8_t encoded_message[],
+ size_t size_bytes) {
+ pw_tokenizer_HandleEncodedMessageWithPayload(
+ metadata, encoded_message, size_bytes);
+}
+
+#endif // PW_LOG_TOKENIZED_BACKEND_USES_NEW_API
+
+// Implement the deprecated global tokenized log handler function. This
+// function is identical to _pw_log_tokenized_EncodeTokenizedLog().
+extern "C" void _pw_tokenizer_ToGlobalHandlerWithPayload(
+ uint32_t metadata,
+ pw_tokenizer_Token token,
+ pw_tokenizer_ArgTypes types,
+ ...) {
+ va_list args;
+ va_start(args, types);
+ pw::tokenizer::EncodedMessage<> encoded_message(token, types, args);
+ va_end(args);
+
+ pw_log_tokenized_HandleLog(
+ metadata, encoded_message.data_as_uint8(), encoded_message.size());
+}
diff --git a/pw_log_tokenized/docs.rst b/pw_log_tokenized/docs.rst
index 1aa626765..24a6d8d4a 100644
--- a/pw_log_tokenized/docs.rst
+++ b/pw_log_tokenized/docs.rst
@@ -9,19 +9,19 @@ connects ``pw_log`` to ``pw_tokenizer``.
C++ backend
===========
``pw_log_tokenized`` provides a backend for ``pw_log`` that tokenizes log
-messages with the ``pw_tokenizer`` module. By default, log messages are
-tokenized with the ``PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD`` macro.
-The log level, 16-bit tokenized module name, and flags bits are passed through
-the payload argument. The macro eventually passes logs to the
-``pw_tokenizer_HandleEncodedMessageWithPayload`` function, which must be
-implemented by the application.
+messages with the ``pw_tokenizer`` module. The log level, 16-bit tokenized
+module name, and flags bits are passed through the payload argument. The macro
+eventually passes logs to the :c:func:`pw_log_tokenized_HandleLog` function,
+which must be implemented by the application.
+
+.. doxygenfunction:: pw_log_tokenized_HandleLog
Example implementation:
.. code-block:: cpp
- extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
- pw_tokenizer_Payload payload, const uint8_t message[], size_t size) {
+ extern "C" void pw_log_tokenized_HandleLog(
+ uint32_t payload, const uint8_t message[], size_t size) {
// The metadata object provides the log level, module token, and flags.
// These values can be recorded and used for runtime filtering.
pw::log_tokenized::Metadata metadata(payload);
@@ -128,29 +128,52 @@ bits allocated is excluded from the log metadata.
Defaults to 16, which gives a ~1% probability of a collision with 37 module
names.
-Using a custom macro
---------------------
-Applications may use their own macro instead of
-``PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD`` by setting the
-``PW_LOG_TOKENIZED_ENCODE_MESSAGE`` config macro. This macro should take
-arguments equivalent to ``PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD``:
-
-.. c:macro:: PW_LOG_TOKENIZED_ENCODE_MESSAGE(log_metadata, message, ...)
+Creating and reading Metadata payloads
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+``pw_log_tokenized`` provides a C++ class to facilitate the creation and
+interpretation of packed log metadata payloads.
- :param log_metadata:
+.. doxygenclass:: pw::log_tokenized::GenericMetadata
+.. doxygentypedef:: pw::log_tokenized::Metadata
- Packed metadata for the log message. See the Metadata_ class for how to
- unpack the details.
+The following example shows that a ``Metadata`` object can be created from a
+``uint32_t`` log metadata payload.
- :type log_metadata: pw_tokenizer_Payload
+.. code-block:: cpp
- :param message: The log message format string (untokenized)
- :type message: :c:texpr:`const char*`
+ extern "C" void pw_log_tokenized_HandleLog(
+ uint32_t payload,
+ const uint8_t message[],
+ size_t size_bytes) {
+ pw::log_tokenized::Metadata metadata = payload;
+ // Check the log level to see if this log is a crash.
+ if (metadata.level() == PW_LOG_LEVEL_FATAL) {
+ HandleCrash(metadata, pw::ConstByteSpan(
+ reinterpret_cast<const std::byte*>(message), size_bytes));
+ PW_UNREACHABLE;
+ }
+ // ...
+ }
+
+It's also possible to get a ``uint32_t`` representation of a ``Metadata``
+object:
- .. _Metadata: https://cs.opensource.google/pigweed/pigweed/+/HEAD:pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h;l=113
+.. code-block:: cpp
-For instructions on how to implement a custom tokenization macro, see
-:ref:`module-pw_tokenizer-custom-macro`.
+ // Logs an explicitly created string token.
+ void LogToken(uint32_t token, int level, int line_number, int module) {
+ const uint32_t payload =
+ log_tokenized::Metadata(
+ level, module, PW_LOG_FLAGS, line_number)
+ .value();
+ std::array<std::byte, sizeof(token)> token_buffer =
+ pw::bytes::CopyInOrder(endian::little, token);
+
+ pw_log_tokenized_HandleLog(
+ payload,
+ reinterpret_cast<const uint8_t*>(token_buffer.data()),
+ token_buffer.size());
+ }
Build targets
-------------
@@ -158,8 +181,8 @@ The GN build for ``pw_log_tokenized`` has two targets: ``pw_log_tokenized`` and
``log_backend``. The ``pw_log_tokenized`` target provides the
``pw_log_tokenized/log_tokenized.h`` header. The ``log_backend`` target
implements the backend for the ``pw_log`` facade. ``pw_log_tokenized`` invokes
-the ``pw_tokenizer:global_handler_with_payload`` facade, which must be
-implemented by the user of ``pw_log_tokenized``.
+the ``pw_log_tokenized:handler`` facade, which must be implemented by the user
+of ``pw_log_tokenized``.
Python package
==============
diff --git a/pw_log_tokenized/log_tokenized.cc b/pw_log_tokenized/log_tokenized.cc
new file mode 100644
index 000000000..25c00d90a
--- /dev/null
+++ b/pw_log_tokenized/log_tokenized.cc
@@ -0,0 +1,34 @@
+// Copyright 2023 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+
+#include "pw_log_tokenized/log_tokenized.h"
+
+#include <cstdarg>
+
+#include "pw_log_tokenized/handler.h"
+#include "pw_tokenizer/encode_args.h"
+
+extern "C" void _pw_log_tokenized_EncodeTokenizedLog(
+ uint32_t metadata,
+ pw_tokenizer_Token token,
+ pw_tokenizer_ArgTypes types,
+ ...) {
+ va_list args;
+ va_start(args, types);
+ pw::tokenizer::EncodedMessage<> encoded_message(token, types, args);
+ va_end(args);
+
+ pw_log_tokenized_HandleLog(
+ metadata, encoded_message.data_as_uint8(), encoded_message.size());
+}
diff --git a/pw_log_tokenized/log_tokenized_test.cc b/pw_log_tokenized/log_tokenized_test.cc
index e926749f6..a8d4759b0 100644
--- a/pw_log_tokenized/log_tokenized_test.cc
+++ b/pw_log_tokenized/log_tokenized_test.cc
@@ -34,7 +34,8 @@ namespace pw::log_tokenized {
namespace {
TEST(LogTokenized, FormatString) {
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(63, 1023, "hello %d", 1);
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
+ 63, PW_LOG_MODULE_NAME, 1023, "hello %d", 1);
EXPECT_STREQ(last_log.format_string,
"■msg♦hello %d■module♦log module name!■file♦" __FILE__);
}
@@ -49,10 +50,11 @@ TEST(LogTokenized, LogMetadata_LevelTooLarge_Clamps) {
EXPECT_EQ(metadata.level(), 7u);
EXPECT_EQ(metadata.flags(), 0u);
EXPECT_EQ(metadata.module(), kModuleToken);
- EXPECT_TRUE(metadata.line_number() == 55u || metadata.line_number() == 45u);
+ EXPECT_EQ(metadata.line_number(), 1000u);
};
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(8, 0, "hello");
+#line 1000
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(8, PW_LOG_MODULE_NAME, 0, "");
check_metadata();
pw_log_tokenized_Test_LogMetadata_LevelTooLarge_Clamps();
@@ -65,10 +67,15 @@ TEST(LogTokenized, LogMetadata_TooManyFlags_Truncates) {
EXPECT_EQ(metadata.level(), 1u);
EXPECT_EQ(metadata.flags(), 0b11u);
EXPECT_EQ(metadata.module(), kModuleToken);
- EXPECT_TRUE(metadata.line_number() == 71u || metadata.line_number() == 49u);
+ EXPECT_EQ(metadata.line_number(), 1100u);
};
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(1, 0xFFFFFFFF, "hello");
+ // Keep statements on a single line, since GCC and Clang disagree about which
+ // line number to assign to multi-line macros.
+ // clang-format off
+#line 1100
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(1, PW_LOG_MODULE_NAME, 0xFFFFFFFF, "hello");
+ // clang-format on
check_metadata();
pw_log_tokenized_Test_LogMetadata_TooManyFlags_Truncates();
@@ -82,10 +89,13 @@ TEST(LogTokenized, LogMetadata_VariousValues) {
EXPECT_EQ(metadata.flags(), 3u);
EXPECT_EQ(metadata.module(), kModuleToken);
EXPECT_EQ(last_log.arg_count, 1u);
- EXPECT_TRUE(metadata.line_number() == 88u || metadata.line_number() == 53u);
+ EXPECT_EQ(metadata.line_number(), 1200u);
};
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(6, 3, "hello%s", "?");
+ // clang-format off
+#line 1200
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(6, PW_LOG_MODULE_NAME, 3, "hello%s", "?");
+ // clang-format on
check_metadata();
pw_log_tokenized_Test_LogMetadata_LogMetadata_VariousValues();
@@ -99,11 +109,11 @@ TEST(LogTokenized, LogMetadata_Zero) {
EXPECT_EQ(metadata.flags(), 0u);
EXPECT_EQ(metadata.module(), kModuleToken);
EXPECT_EQ(last_log.arg_count, 0u);
- EXPECT_TRUE(metadata.line_number() == 106u ||
- metadata.line_number() == 57u);
+ EXPECT_EQ(metadata.line_number(), 1300u);
};
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(0, 0, "hello");
+#line 1300
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(0, PW_LOG_MODULE_NAME, 0, "");
check_metadata();
pw_log_tokenized_Test_LogMetadata_LogMetadata_Zero();
@@ -112,27 +122,32 @@ TEST(LogTokenized, LogMetadata_Zero) {
TEST(LogTokenized, LogMetadata_MaxValues) {
#line 2047
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(7, 3, "hello %d", 1);
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(7, "name", 3, "hello %d", 1);
Metadata metadata = Metadata(last_log.metadata);
EXPECT_EQ(metadata.line_number(), 2047u);
EXPECT_EQ(metadata.level(), 7u);
EXPECT_EQ(metadata.flags(), 3u);
- EXPECT_EQ(metadata.module(), kModuleToken);
+ EXPECT_EQ(metadata.module(),
+ PW_TOKENIZER_STRING_TOKEN("name") &
+ ((1u << PW_LOG_TOKENIZED_MODULE_BITS) - 1));
EXPECT_EQ(last_log.arg_count, 1u);
}
TEST(LogTokenized, LogMetadata_LineNumberTooLarge_IsZero) {
#line 2048 // At 11 bits, the largest representable line is 2047
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(7, 3, "hello %d", 1);
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
+ 7, PW_LOG_MODULE_NAME, 3, "hello %d", 1);
EXPECT_EQ(Metadata(last_log.metadata).line_number(), 0u);
#line 2049
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(7, 3, "hello %d", 1);
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
+ 7, PW_LOG_MODULE_NAME, 3, "hello %d", 1);
EXPECT_EQ(Metadata(last_log.metadata).line_number(), 0u);
#line 99999
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(7, 3, "hello %d", 1);
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(
+ 7, PW_LOG_MODULE_NAME, 3, "hello %d", 1);
EXPECT_EQ(Metadata(last_log.metadata).line_number(), 0u);
}
diff --git a/pw_log_tokenized/log_tokenized_test_c.c b/pw_log_tokenized/log_tokenized_test_c.c
index c0b28b79b..81667a7e0 100644
--- a/pw_log_tokenized/log_tokenized_test_c.c
+++ b/pw_log_tokenized/log_tokenized_test_c.c
@@ -42,17 +42,25 @@ void pw_log_tokenized_CaptureArgs(uintptr_t payload,
// These functions correspond to tests in log_tokenized_test.cc. The tests call
// these functions and check the results.
void pw_log_tokenized_Test_LogMetadata_LevelTooLarge_Clamps(void) {
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(8, 0, "hello");
+#line 1000
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(8, PW_LOG_MODULE_NAME, 0, "");
}
void pw_log_tokenized_Test_LogMetadata_TooManyFlags_Truncates(void) {
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(1, 0xFFFFFFFF, "hello");
+// clang-format off
+#line 1100
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(1, PW_LOG_MODULE_NAME, 0xFFFFFFFF, "hello");
+ // clang-format on
}
void pw_log_tokenized_Test_LogMetadata_LogMetadata_VariousValues(void) {
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(6, 3, "hello%s", "?");
+// clang-format off
+#line 1200
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(6, PW_LOG_MODULE_NAME, 3, "hello%s", "?");
+ // clang-format on
}
void pw_log_tokenized_Test_LogMetadata_LogMetadata_Zero(void) {
- PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(0, 0, "hello");
+#line 1300
+ PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD(0, PW_LOG_MODULE_NAME, 0, "");
}
diff --git a/pw_log_tokenized/metadata_test.cc b/pw_log_tokenized/metadata_test.cc
index 3a45ceeae..4d84c5aaf 100644
--- a/pw_log_tokenized/metadata_test.cc
+++ b/pw_log_tokenized/metadata_test.cc
@@ -20,7 +20,7 @@ namespace pw::log_tokenized {
namespace {
TEST(Metadata, NoLineBits) {
- using NoLineBits = internal::GenericMetadata<6, 0, 10, 16>;
+ using NoLineBits = GenericMetadata<6, 0, 10, 16>;
constexpr NoLineBits test1 = NoLineBits::Set<0, 0, 0>();
static_assert(test1.level() == 0);
@@ -42,7 +42,7 @@ TEST(Metadata, NoLineBits) {
}
TEST(Metadata, NoFlagBits) {
- using NoFlagBits = internal::GenericMetadata<3, 13, 0, 16>;
+ using NoFlagBits = GenericMetadata<3, 13, 0, 16>;
constexpr NoFlagBits test1 = NoFlagBits::Set<0, 0, 0, 0>();
static_assert(test1.level() == 0);
@@ -63,5 +63,50 @@ TEST(Metadata, NoFlagBits) {
static_assert(test3.line_number() == (1 << 13) - 1);
}
+TEST(Metadata, EncodedValue_Zero) {
+ constexpr Metadata test1 = Metadata::Set<0, 0, 0, 0>();
+ static_assert(test1.value() == 0);
+}
+
+TEST(Metadata, EncodedValue_Nonzero) {
+ constexpr size_t kExpectedLevel = 3;
+ constexpr size_t kExpectedLine = 2022;
+ constexpr size_t kExpectedFlags = 0b10;
+ constexpr size_t kExpectedModule = 1337;
+ constexpr size_t kExpectedValue =
+ (kExpectedLevel) | (kExpectedLine << PW_LOG_TOKENIZED_LEVEL_BITS) |
+ (kExpectedFlags << (PW_LOG_TOKENIZED_LEVEL_BITS +
+ PW_LOG_TOKENIZED_LINE_BITS)) |
+ (kExpectedModule << (PW_LOG_TOKENIZED_LEVEL_BITS +
+ PW_LOG_TOKENIZED_LINE_BITS +
+ PW_LOG_TOKENIZED_FLAG_BITS));
+ constexpr Metadata test = Metadata::
+ Set<kExpectedLevel, kExpectedModule, kExpectedFlags, kExpectedLine>();
+ static_assert(test.value() == kExpectedValue);
+}
+
+TEST(Metadata, EncodedValue_NonzeroConstructor) {
+ constexpr size_t kExpectedLevel = 1;
+ constexpr size_t kExpectedLine = 99;
+ constexpr size_t kExpectedFlags = 0b11;
+ constexpr size_t kExpectedModule = 8900;
+ constexpr size_t kExpectedValue =
+ (kExpectedLevel) | (kExpectedLine << PW_LOG_TOKENIZED_LEVEL_BITS) |
+ (kExpectedFlags << (PW_LOG_TOKENIZED_LEVEL_BITS +
+ PW_LOG_TOKENIZED_LINE_BITS)) |
+ (kExpectedModule << (PW_LOG_TOKENIZED_LEVEL_BITS +
+ PW_LOG_TOKENIZED_LINE_BITS +
+ PW_LOG_TOKENIZED_FLAG_BITS));
+ constexpr Metadata test =
+ Metadata(kExpectedLevel, kExpectedModule, kExpectedFlags, kExpectedLine);
+ static_assert(test.value() == kExpectedValue);
+}
+
+TEST(Metadata, EncodedValue_Overflow) {
+ constexpr size_t kExpectedLevel = 144;
+ constexpr Metadata test = Metadata(kExpectedLevel, 0, 0, 0);
+ static_assert(test.value() == 0);
+}
+
} // namespace
} // namespace pw::log_tokenized
diff --git a/pw_log_tokenized/public/pw_log_tokenized/config.h b/pw_log_tokenized/public/pw_log_tokenized/config.h
index de3351026..647598177 100644
--- a/pw_log_tokenized/public/pw_log_tokenized/config.h
+++ b/pw_log_tokenized/public/pw_log_tokenized/config.h
@@ -41,10 +41,6 @@
#define PW_LOG_TOKENIZED_LEVEL_BITS PW_LOG_LEVEL_BITS
#endif // PW_LOG_TOKENIZED_LEVEL_BITS
-// Bits to allocate for the line number. Defaults to 11 (up to line 2047). If
-// the line number is too large to be represented by this field, line is
-// reported as 0.
-//
// Including the line number can slightly increase code size. Without the line
// number, the log metadata argument is the same for all logs with the same
// level and flags. With the line number, each metadata value is unique and must
@@ -72,11 +68,3 @@
static_assert((PW_LOG_TOKENIZED_LEVEL_BITS + PW_LOG_TOKENIZED_LINE_BITS +
PW_LOG_TOKENIZED_FLAG_BITS + PW_LOG_TOKENIZED_MODULE_BITS) == 32,
"Log metadata fields must use 32 bits");
-
-// The macro to use to tokenize the log and its arguments. Defaults to
-// PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD. Projects may define their own
-// version of this macro that uses a different underlying function, if desired.
-#ifndef PW_LOG_TOKENIZED_ENCODE_MESSAGE
-#define PW_LOG_TOKENIZED_ENCODE_MESSAGE \
- PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD
-#endif // PW_LOG_TOKENIZED_ENCODE_MESSAGE
diff --git a/pw_log_tokenized/public/pw_log_tokenized/handler.h b/pw_log_tokenized/public/pw_log_tokenized/handler.h
new file mode 100644
index 000000000..40ac0f7d8
--- /dev/null
+++ b/pw_log_tokenized/public/pw_log_tokenized/handler.h
@@ -0,0 +1,31 @@
+// Copyright 2023 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+#pragma once
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "pw_preprocessor/util.h"
+
+PW_EXTERN_C_START
+
+/// Function that is called for each log message. The metadata `uint32_t` can be
+/// converted to a @cpp_type{pw::log_tokenized::Metadata}. The message is passed
+/// as a pointer to a buffer and a size. The pointer is invalidated after this
+/// function returns, so the buffer must be copied.
+void pw_log_tokenized_HandleLog(uint32_t metadata,
+ const uint8_t encoded_message[],
+ size_t size_bytes);
+
+PW_EXTERN_C_END
diff --git a/pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h b/pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h
index 110d5425f..61454a741 100644
--- a/pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h
+++ b/pw_log_tokenized/public/pw_log_tokenized/log_tokenized.h
@@ -16,30 +16,36 @@
#include <stdint.h>
#include "pw_log_tokenized/config.h"
-#include "pw_tokenizer/tokenize_to_global_handler_with_payload.h"
+#include "pw_preprocessor/util.h"
+#include "pw_tokenizer/tokenize.h"
-// TODO(hepler): Remove this include.
+// TODO(hepler): Remove these includes.
#ifdef __cplusplus
#include "pw_log_tokenized/metadata.h"
#endif // __cplusplus
-// This macro implements PW_LOG using
-// PW_TOKENIZE_TO_GLOBAL_HANDLER_WITH_PAYLOAD or an equivalent alternate macro
-// provided by PW_LOG_TOKENIZED_ENCODE_MESSAGE. The log level, module token, and
-// flags are packed into the payload argument.
+#ifdef _PW_LOG_TOKENIZED_GLOBAL_HANDLER_BACKWARDS_COMPAT
+#include "pw_tokenizer/tokenize_to_global_handler_with_payload.h"
+#endif // _PW_LOG_TOKENIZED_GLOBAL_HANDLER_BACKWARDS_COMPAT
+
+#undef _PW_LOG_TOKENIZED_GLOBAL_HANDLER_BACKWARDS_COMPAT
+
+// This macro implements PW_LOG using pw_tokenizer. Users must implement
+// pw_log_tokenized_HandleLog(uint32_t metadata, uint8_t* buffer, size_t size).
+// The log level, module token, and flags are packed into the metadata argument.
//
// Two strings are tokenized in this macro:
//
// - The log format string, tokenized in the default tokenizer domain.
-// - PW_LOG_MODULE_NAME, masked to 16 bits and tokenized in the
+// - Log module name, masked to 16 bits and tokenized in the
// "pw_log_module_names" tokenizer domain.
//
-// To use this macro, implement pw_tokenizer_HandleEncodedMessageWithPayload,
-// which is defined in pw_tokenizer/tokenize.h. The log metadata can be accessed
-// using pw::log_tokenized::Metadata. For example:
+// To use this macro, implement pw_log_tokenized_HandleLog(), which is defined
+// in pw_log_tokenized/handler.h. The log metadata can be accessed using
+// pw::log_tokenized::Metadata. For example:
//
-// extern "C" void pw_tokenizer_HandleEncodedMessageWithPayload(
-// pw_tokenizer_Payload payload, const uint8_t data[], size_t size) {
+// extern "C" void pw_log_tokenized_HandleLog(
+// uint32_t payload, const uint8_t data[], size_t size) {
// pw::log_tokenized::Metadata metadata(payload);
//
// if (metadata.level() >= kLogLevel && ModuleEnabled(metadata.module())) {
@@ -48,12 +54,12 @@
// }
//
#define PW_LOG_TOKENIZED_TO_GLOBAL_HANDLER_WITH_PAYLOAD( \
- level, flags, message, ...) \
+ level, module, flags, message, ...) \
do { \
_PW_TOKENIZER_CONST uintptr_t _pw_log_tokenized_module_token = \
PW_TOKENIZE_STRING_MASK("pw_log_module_names", \
((1u << PW_LOG_TOKENIZED_MODULE_BITS) - 1u), \
- PW_LOG_MODULE_NAME); \
+ module); \
const uintptr_t _pw_log_tokenized_level = level; \
PW_LOG_TOKENIZED_ENCODE_MESSAGE( \
(_PW_LOG_TOKENIZED_LEVEL(_pw_log_tokenized_level) | \
@@ -101,3 +107,22 @@
PW_LOG_TOKENIZED_LINE_BITS + \
PW_LOG_TOKENIZED_FLAG_BITS)))
#endif // PW_LOG_TOKENIZED_MODULE_BITS
+
+#define PW_LOG_TOKENIZED_ENCODE_MESSAGE(metadata, format, ...) \
+ do { \
+ PW_TOKENIZE_FORMAT_STRING( \
+ PW_TOKENIZER_DEFAULT_DOMAIN, UINT32_MAX, format, __VA_ARGS__); \
+ _pw_log_tokenized_EncodeTokenizedLog(metadata, \
+ _pw_tokenizer_token, \
+ PW_TOKENIZER_ARG_TYPES(__VA_ARGS__) \
+ PW_COMMA_ARGS(__VA_ARGS__)); \
+ } while (0)
+
+PW_EXTERN_C_START
+
+void _pw_log_tokenized_EncodeTokenizedLog(uint32_t metadata,
+ pw_tokenizer_Token token,
+ pw_tokenizer_ArgTypes types,
+ ...);
+
+PW_EXTERN_C_END
diff --git a/pw_log_tokenized/public/pw_log_tokenized/metadata.h b/pw_log_tokenized/public/pw_log_tokenized/metadata.h
index 86e6364bd..5d78e3b38 100644
--- a/pw_log_tokenized/public/pw_log_tokenized/metadata.h
+++ b/pw_log_tokenized/public/pw_log_tokenized/metadata.h
@@ -41,8 +41,17 @@ class BitField<T, 0, kShift> {
static constexpr T Shift(T) { return 0; }
};
+} // namespace internal
+
// This class, which is aliased to pw::log_tokenized::Metadata below, is used to
// access the log metadata packed into the tokenizer's payload argument.
+//
+/// `GenericMetadata` facilitates the creation and interpretation of packed
+/// log metadata payloads. The `GenericMetadata` class allows flags, log level,
+/// line number, and a module identifier to be packed into bit fields of
+/// configurable size.
+///
+/// Typically, the `Metadata` alias should be used instead.
template <unsigned kLevelBits,
unsigned kLineBits,
unsigned kFlagBits,
@@ -57,43 +66,62 @@ class GenericMetadata {
static_assert(flags < (1 << kFlagBits), "The flags are too large!");
static_assert(module < (1 << kModuleBits), "The module is too large!");
- return GenericMetadata(Level::Shift(log_level) | Module::Shift(module) |
- Flags::Shift(flags) | Line::Shift(line));
+ return GenericMetadata(BitsFromMetadata(log_level, module, flags, line));
}
- constexpr GenericMetadata(T value) : bits_(value) {}
+ /// Only use this constructor for creating metadata from runtime values. This
+ /// constructor is unable to warn at compilation when values will not fit in
+ /// the specified bit field widths.
+ constexpr GenericMetadata(T log_level, T module, T flags, T line)
+ : value_(BitsFromMetadata(log_level, module, flags, line)) {}
+
+ constexpr GenericMetadata(T value) : value_(value) {}
- // The log level of this message.
- constexpr T level() const { return Level::Get(bits_); }
+ /// The log level of this message.
+ constexpr T level() const { return Level::Get(value_); }
- // The line number of the log call. The first line in a file is 1. If the line
- // number is 0, it was too large to be stored.
- constexpr T line_number() const { return Line::Get(bits_); }
+ /// The line number of the log call. The first line in a file is 1. If the
+ /// line number is 0, it was too large to be stored.
+ constexpr T line_number() const { return Line::Get(value_); }
- // The flags provided to the log call.
- constexpr T flags() const { return Flags::Get(bits_); }
+ /// The flags provided to the log call.
+ constexpr T flags() const { return Flags::Get(value_); }
- // The 16 bit tokenized version of the module name (PW_LOG_MODULE_NAME).
- constexpr T module() const { return Module::Get(bits_); }
+ /// The 16-bit tokenized version of the module name
+ /// (@c_macro{PW_LOG_MODULE_NAME}).
+ constexpr T module() const { return Module::Get(value_); }
+
+ /// The underlying packed metadata.
+ constexpr T value() const { return value_; }
private:
- using Level = BitField<T, kLevelBits, 0>;
- using Line = BitField<T, kLineBits, kLevelBits>;
- using Flags = BitField<T, kFlagBits, kLevelBits + kLineBits>;
- using Module = BitField<T, kModuleBits, kLevelBits + kLineBits + kFlagBits>;
+ using Level = internal::BitField<T, kLevelBits, 0>;
+ using Line = internal::BitField<T, kLineBits, kLevelBits>;
+ using Flags = internal::BitField<T, kFlagBits, kLevelBits + kLineBits>;
+ using Module =
+ internal::BitField<T, kModuleBits, kLevelBits + kLineBits + kFlagBits>;
+
+ static constexpr T BitsFromMetadata(T log_level, T module, T flags, T line) {
+ return Level::Shift(log_level) | Module::Shift(module) |
+ Flags::Shift(flags) | Line::Shift(line);
+ }
- T bits_;
+ T value_;
static_assert(kLevelBits + kLineBits + kFlagBits + kModuleBits <=
- sizeof(bits_) * 8);
+ sizeof(value_) * 8);
};
-} // namespace internal
-
-using Metadata = internal::GenericMetadata<PW_LOG_TOKENIZED_LEVEL_BITS,
- PW_LOG_TOKENIZED_LINE_BITS,
- PW_LOG_TOKENIZED_FLAG_BITS,
- PW_LOG_TOKENIZED_MODULE_BITS>;
+/// The `Metadata` alias simplifies the bit field width templatization of
+/// `GenericMetadata` by pulling from this module's configuration options. In
+/// most cases, it's recommended to use `Metadata` to create or read metadata
+/// payloads.
+///
+/// A `Metadata` object can be created from a `uint32_t`.
+using Metadata = GenericMetadata<PW_LOG_TOKENIZED_LEVEL_BITS,
+ PW_LOG_TOKENIZED_LINE_BITS,
+ PW_LOG_TOKENIZED_FLAG_BITS,
+ PW_LOG_TOKENIZED_MODULE_BITS>;
} // namespace log_tokenized
} // namespace pw
diff --git a/pw_log_tokenized/py/BUILD.bazel b/pw_log_tokenized/py/BUILD.bazel
new file mode 100644
index 000000000..eca513e05
--- /dev/null
+++ b/pw_log_tokenized/py/BUILD.bazel
@@ -0,0 +1,34 @@
+# Copyright 2022 The Pigweed Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+py_library(
+ name = "pw_log_tokenized",
+ srcs = ["pw_log_tokenized/__init__.py"],
+ imports = ["."],
+ visibility = ["//visibility:public"],
+)
+
+py_test(
+ name = "format_string_test",
+ size = "small",
+ srcs = ["format_string_test.py"],
+ deps = [":pw_log_tokenized"],
+)
+
+py_test(
+ name = "metadata_test",
+ size = "small",
+ srcs = ["metadata_test.py"],
+ deps = [":pw_log_tokenized"],
+)
diff --git a/pw_log_tokenized/py/BUILD.gn b/pw_log_tokenized/py/BUILD.gn
index 8931cb2d2..c9e376eec 100644
--- a/pw_log_tokenized/py/BUILD.gn
+++ b/pw_log_tokenized/py/BUILD.gn
@@ -28,4 +28,5 @@ pw_python_package("py") {
"metadata_test.py",
]
pylintrc = "$dir_pigweed/.pylintrc"
+ mypy_ini = "$dir_pigweed/.mypy.ini"
}
diff --git a/pw_log_tokenized/py/format_string_test.py b/pw_log_tokenized/py/format_string_test.py
index af49bf200..cf04c788d 100644
--- a/pw_log_tokenized/py/format_string_test.py
+++ b/pw_log_tokenized/py/format_string_test.py
@@ -21,9 +21,11 @@ from pw_log_tokenized import FormatStringWithMetadata
class TestFormatStringWithMetadata(unittest.TestCase):
"""Tests extracting metadata from a pw_log_tokenized-style format string."""
+
def test_all_fields(self):
log = FormatStringWithMetadata(
- '■msg♦hello %d■file♦__FILE__■module♦log module name!')
+ '■msg♦hello %d■file♦__FILE__■module♦log module name!'
+ )
self.assertEqual(log.message, 'hello %d')
self.assertEqual(log.module, 'log module name!')
self.assertEqual(log.file, '__FILE__')
diff --git a/pw_log_tokenized/py/metadata_test.py b/pw_log_tokenized/py/metadata_test.py
index 1073a928d..83eb90421 100644
--- a/pw_log_tokenized/py/metadata_test.py
+++ b/pw_log_tokenized/py/metadata_test.py
@@ -20,6 +20,7 @@ from pw_log_tokenized import Metadata
class TestMetadata(unittest.TestCase):
"""Tests extracting fields from a pw_log_tokenized packed metadata value."""
+
def test_zero(self):
metadata = Metadata(0)
self.assertEqual(metadata.log_level, 0)
@@ -28,22 +29,22 @@ class TestMetadata(unittest.TestCase):
self.assertEqual(metadata.module_token, 0)
def test_various(self):
- metadata = Metadata(0xABCD << 16 | 1 << 14 | 1234 << 3 | 5,
- log_bits=3,
- line_bits=11,
- flag_bits=2,
- module_bits=16)
+ metadata = Metadata(
+ 0xABCD << 16 | 1 << 14 | 1234 << 3 | 5,
+ log_bits=3,
+ line_bits=11,
+ flag_bits=2,
+ module_bits=16,
+ )
self.assertEqual(metadata.log_level, 5)
self.assertEqual(metadata.line, 1234)
self.assertEqual(metadata.flags, 1)
self.assertEqual(metadata.module_token, 0xABCD)
def test_max(self):
- metadata = Metadata(0xFFFFFFFF,
- log_bits=3,
- line_bits=11,
- flag_bits=2,
- module_bits=16)
+ metadata = Metadata(
+ 0xFFFFFFFF, log_bits=3, line_bits=11, flag_bits=2, module_bits=16
+ )
self.assertEqual(metadata.log_level, 7)
self.assertEqual(metadata.line, 2047)
self.assertEqual(metadata.flags, 3)
diff --git a/pw_log_tokenized/py/pw_log_tokenized/__init__.py b/pw_log_tokenized/py/pw_log_tokenized/__init__.py
index d0cc1d14f..9fff100cc 100644
--- a/pw_log_tokenized/py/pw_log_tokenized/__init__.py
+++ b/pw_log_tokenized/py/pw_log_tokenized/__init__.py
@@ -13,9 +13,9 @@
# the License.
"""Tools for working with tokenized logs."""
-from dataclasses import dataclass
+from dataclasses import dataclass, asdict
import re
-from typing import Dict, Mapping
+from typing import Dict, Mapping, Iterator
def _mask(value: int, start: int, count: int) -> int:
@@ -23,33 +23,38 @@ def _mask(value: int, start: int, count: int) -> int:
return (value & (mask << start)) >> start
+@dataclass
class Metadata:
"""Parses the metadata payload used by pw_log_tokenized."""
- def __init__(self,
- value: int,
- *,
- log_bits: int = 3,
- line_bits: int = 11,
- flag_bits: int = 2,
- module_bits: int = 16) -> None:
- self.value = value
-
- self.log_level = _mask(value, 0, log_bits)
- self.line = _mask(value, log_bits, line_bits)
- self.flags = _mask(value, log_bits + line_bits, flag_bits)
- self.module_token = _mask(value, log_bits + line_bits + flag_bits,
- module_bits)
- def __repr__(self) -> str:
- return (f'{type(self).__name__}('
- f'log_level={self.log_level}, '
- f'line={self.line}, '
- f'flags={self.flags}, '
- f'module_token={self.module_token})')
+ value: int
+ log_bits: int = 3
+ line_bits: int = 11
+ flag_bits: int = 2
+ module_bits: int = 16
+
+ def __post_init__(self):
+ self.log_level = _mask(self.value, 0, self.log_bits)
+ self.line = _mask(self.value, self.log_bits, self.line_bits)
+ self.flags = _mask(
+ self.value, self.log_bits + self.line_bits, self.flag_bits
+ )
+ self.module_token = _mask(
+ self.value,
+ self.log_bits + self.line_bits + self.flag_bits,
+ self.module_bits,
+ )
+
+ def __iter__(self):
+ return iter(asdict(self).items())
+
+ def __dict__(self):
+ return asdict(self)
class FormatStringWithMetadata:
"""Parses metadata from a log format string with metadata fields."""
+
_FIELD_KEY = re.compile(r'■([a-zA-Z]\w*)♦', flags=re.ASCII)
def __init__(self, string: str) -> None: