diff options
author | Wyatt Hepler <hepler@google.com> | 2022-04-01 16:13:20 -0700 |
---|---|---|
committer | CQ Bot Account <pigweed-scoped@luci-project-accounts.iam.gserviceaccount.com> | 2022-04-04 23:57:37 +0000 |
commit | ff9933f2cf97eddb625d547ef9a86e9af95326ac (patch) | |
tree | 57a8a4c269bcac73b7c0bdbffb4ec3f8ff6facfd | |
parent | e520091eebfc1c5527d20b9acf37269b0d8811e3 (diff) | |
download | pigweed-ff9933f2cf97eddb625d547ef9a86e9af95326ac.tar.gz |
pw_system: Use AutoUpdatingDetokenizer
- Automatically reload the token database when database files change.
- Add type annotations to AutoUpdatingDetokenizer.
Change-Id: I086fbf3d9ad2c348dc12f134f8c5ee230ab0286b
Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/89642
Pigweed-Auto-Submit: Wyatt Hepler <hepler@google.com>
Reviewed-by: Keir Mierle <keir@google.com>
Commit-Queue: Auto-Submit <auto-submit@pigweed.google.com.iam.gserviceaccount.com>
-rw-r--r-- | pw_system/py/pw_system/console.py | 12 | ||||
-rwxr-xr-x | pw_tokenizer/py/pw_tokenizer/detokenize.py | 12 |
2 files changed, 13 insertions, 11 deletions
diff --git a/pw_system/py/pw_system/console.py b/pw_system/py/pw_system/console.py
index 8a9de01e0..7843837a4 100644
--- a/pw_system/py/pw_system/console.py
+++ b/pw_system/py/pw_system/console.py
@@ -61,9 +61,7 @@ from pw_console.plugins.bandwidth_toolbar import BandwidthToolbar
 from pw_log.proto import log_pb2
 from pw_rpc.console_tools.console import flattened_rpc_completions
 from pw_system.device import Device
-from pw_tokenizer.database import LoadTokenDatabases
-from pw_tokenizer.detokenize import Detokenizer
-from pw_tokenizer import tokens
+from pw_tokenizer.detokenize import AutoUpdatingDetokenizer
 
 _LOG = logging.getLogger('tools')
 _DEVICE_LOG = logging.getLogger('rpc_device')
@@ -105,7 +103,7 @@ def _parse_args():
     parser.add_argument("--token-databases",
                         metavar='elf_or_token_database',
                         nargs="+",
-                        action=LoadTokenDatabases,
+                        type=Path,
                         help="Path to tokenizer database csv file(s).")
     parser.add_argument('--config-file',
                         type=Path,
@@ -211,7 +209,7 @@ class SocketClientImpl:
 def console(device: str,
             baudrate: int,
             proto_globs: Collection[str],
-            token_databases: Collection[tokens.Database],
+            token_databases: Collection[Path],
             socket_addr: str,
             logfile: str,
             output: Any,
@@ -235,8 +233,8 @@ def console(device: str,
 
     detokenizer = None
     if token_databases:
-        detokenizer = Detokenizer(tokens.Database.merged(*token_databases),
-                                  show_errors=True)
+        detokenizer = AutoUpdatingDetokenizer(*token_databases)
+        detokenizer.show_errors = True
 
     if not proto_globs:
         proto_globs = ['**/*.proto']
diff --git a/pw_tokenizer/py/pw_tokenizer/detokenize.py b/pw_tokenizer/py/pw_tokenizer/detokenize.py
index ddd698dd8..f4c59f040 100755
--- a/pw_tokenizer/py/pw_tokenizer/detokenize.py
+++ b/pw_tokenizer/py/pw_tokenizer/detokenize.py
@@ -44,8 +44,9 @@ import string
 import struct
 import sys
 import time
-from typing import (AnyStr, BinaryIO, Callable, Dict, List, Iterable, Iterator,
-                    Match, NamedTuple, Optional, Pattern, Tuple, Union)
+from typing import (AnyStr, BinaryIO, Callable, Dict, List, Iterable, IO,
+                    Iterator, Match, NamedTuple, Optional, Pattern, Tuple,
+                    Union)
 
 try:
     from pw_tokenizer import database, decode, encode, tokens
@@ -299,11 +300,14 @@ class Detokenizer:
         return decode_and_detokenize
 
 
+_PathOrFile = Union[IO, str, Path]
+
+
 class AutoUpdatingDetokenizer(Detokenizer):
     """Loads and updates a detokenizer from database paths."""
     class _DatabasePath:
         """Tracks the modified time of a path or file object."""
-        def __init__(self, path):
+        def __init__(self, path: _PathOrFile) -> None:
             self.path = path if isinstance(path, (str, Path)) else path.name
             self._modified_time: Optional[float] = self._last_modified_time()
@@ -329,7 +333,7 @@ class AutoUpdatingDetokenizer(Detokenizer):
         return database.load_token_database()
 
     def __init__(self,
-                 *paths_or_files,
+                 *paths_or_files: _PathOrFile,
                  min_poll_period_s: float = 1.0) -> None:
         self.paths = tuple(self._DatabasePath(path) for path in paths_or_files)
         self.min_poll_period_s = min_poll_period_s