author     Henri Chataing <henrichataing@google.com>  2023-06-20 16:39:51 +0000
committer  Henri Chataing <henrichataing@google.com>  2023-06-20 20:53:50 +0000
commit     e6036688e26d7c89885bb51e23d3f03495125bff (patch)
tree       2df2fcbccd2392d4681c8ae8ff7004a2583a8bd3
parent     db9fffe8304d4c6c59b5695c1394495d8c3290e5 (diff)
download   pdl-compiler-e6036688e26d7c89885bb51e23d3f03495125bff.tar.gz
Update pdl-compiler to 0.1.2
Test: TreeHugger
Change-Id: Ia18a174548a23d7bd9a21e41934d5ed123e44d6e
-rw-r--r--   .cargo_vcs_info.json                   |   6
-rw-r--r--   .gitignore                             |   3
-rw-r--r--   Android.bp                             |  42
-rw-r--r--   Cargo.toml                             |   5
-rw-r--r--   Cargo.toml.orig                        |   3
-rw-r--r--   METADATA                               |  12
-rw-r--r--   cargo2android.json                     |   1
-rw-r--r--   cargo2android_toplevel.bp              |   5
-rw-r--r--   examples/jpeg.pdl                      |  88
-rw-r--r--   examples/pcap.pdl                      |  27
-rwxr-xr-x   scripts/generate_cxx_backend.py        | 305
-rwxr-xr-x   scripts/generate_cxx_backend_tests.py  |  48
-rw-r--r--   scripts/packet_runtime.h               |  25
-rw-r--r--   src/analyzer.rs                        | 619
-rw-r--r--   src/backends/rust.rs                   |  40
-rw-r--r--   src/backends/rust/parser.rs            |  14
-rw-r--r--   src/backends/rust/serializer.rs        |   2
-rw-r--r--   src/lib.rs                             |  24
-rw-r--r--   src/lint.rs                            |  61
-rw-r--r--   src/main.rs                            |  31
-rw-r--r--   src/test_utils.rs                      |  91
-rw-r--r--   tests/python_generator_test.py         |  30
-rwxr-xr-x   tests/run_cxx_generator_tests.sh       |  46
-rwxr-xr-x   tests/run_python_generator_tests.sh    |  24
24 files changed, 1232 insertions(+), 320 deletions(-)
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
new file mode 100644
index 0000000..686a0a1
--- /dev/null
+++ b/.cargo_vcs_info.json
@@ -0,0 +1,6 @@
+{
+ "git": {
+ "sha1": "c2c504a038cfe412ad8a3f8c0a1b747126eeedda"
+ },
+ "path_in_vcs": ""
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..e84ac28
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+Cargo.lock
+scripts/pdl/__pycache__/
+target/
diff --git a/Android.bp b/Android.bp
index 6c2e6e4..f8f8ee2 100644
--- a/Android.bp
+++ b/Android.bp
@@ -7,7 +7,7 @@ rust_binary_host {
name: "generate_canonical_tests",
crate_name: "generate_canonical_tests",
cargo_env_compat: true,
- cargo_pkg_version: "0.1.1",
+ cargo_pkg_version: "0.1.2",
srcs: ["src/bin/generate-canonical-tests.rs"],
edition: "2021",
features: [
@@ -18,6 +18,7 @@ rust_binary_host {
"libargh",
"libcodespan_reporting",
"libheck",
+ "libpdl_compiler",
"libpest",
"libprettyplease",
"libproc_macro2",
@@ -27,6 +28,36 @@ rust_binary_host {
"libsyn",
],
proc_macros: ["libpest_derive"],
+ compile_multilib: "first",
+ product_available: true,
+ vendor_available: true,
+}
+
+rust_library_host {
+ name: "libpdl_compiler",
+ crate_name: "pdl_compiler",
+ cargo_env_compat: true,
+ cargo_pkg_version: "0.1.2",
+ srcs: ["src/lib.rs"],
+ edition: "2021",
+ features: [
+ "default",
+ "serde",
+ ],
+ rustlibs: [
+ "libargh",
+ "libcodespan_reporting",
+ "libheck",
+ "libpest",
+ "libprettyplease",
+ "libproc_macro2",
+ "libquote",
+ "libserde",
+ "libserde_json",
+ "libsyn",
+ ],
+ proc_macros: ["libpest_derive"],
+ compile_multilib: "first",
product_available: true,
vendor_available: true,
}
@@ -35,7 +66,7 @@ rust_binary_host {
name: "pdlc",
crate_name: "pdlc",
cargo_env_compat: true,
- cargo_pkg_version: "0.1.1",
+ cargo_pkg_version: "0.1.2",
srcs: ["src/main.rs"],
edition: "2021",
features: [
@@ -46,6 +77,7 @@ rust_binary_host {
"libargh",
"libcodespan_reporting",
"libheck",
+ "libpdl_compiler",
"libpest",
"libprettyplease",
"libproc_macro2",
@@ -55,6 +87,7 @@ rust_binary_host {
"libsyn",
],
proc_macros: ["libpest_derive"],
+ compile_multilib: "first",
product_available: true,
vendor_available: true,
}
@@ -182,6 +215,7 @@ rust_test_host {
"libargh",
"libcodespan_reporting",
"libheck",
+ "libpdl_compiler",
"libpest",
"libprettyplease",
"libproc_macro2",
@@ -358,7 +392,7 @@ genrule {
"tests/canonical/le_test_file.pdl",
],
out: [
- "le_pdl_test.py",
+ "le_backend.py",
],
}
@@ -378,7 +412,7 @@ genrule {
":pdl_be_test_file",
],
out: [
- "be_pdl_test.py",
+ "be_backend.py",
],
}
diff --git a/Cargo.toml b/Cargo.toml
index eb0c8b5..6bbb7d4 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,7 +12,7 @@
[package]
edition = "2021"
name = "pdl-compiler"
-version = "0.1.1"
+version = "0.1.2"
authors = [
"Henri Chataing <henrichataing@google.com>",
"David de Jesus Duarte <licorne@google.com>",
@@ -81,6 +81,9 @@ version = "2.0.16"
version = "1.2.1"
features = ["serde"]
+[dev-dependencies.googletest]
+version = "0.7.0"
+
[dev-dependencies.num-derive]
version = "0.3.3"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index 618869a..878053d 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
[package]
name = "pdl-compiler"
-version = "0.1.1"
+version = "0.1.2"
edition = "2021"
description = "Parser and serializer generator for protocol binary packets"
repository = "https://github.com/google/pdl/"
@@ -49,3 +49,4 @@ num-derive = "0.3.3"
num-traits = "0.2.15"
thiserror = "1.0.37"
paste = "1.0.6"
+googletest = "0.7.0"
diff --git a/METADATA b/METADATA
index 7492467..5d18826 100644
--- a/METADATA
+++ b/METADATA
@@ -1,3 +1,7 @@
+# This project was upgraded with external_updater.
+# Usage: tools/external_updater/updater.sh update rust/crates/pdl-compiler
+# For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md
+
name: "pdl-compiler"
description: "Parser and serializer generator for protocol binary packets"
third_party {
@@ -7,13 +11,13 @@ third_party {
}
url {
type: ARCHIVE
- value: "https://static.crates.io/crates/pdl-compiler/pdl-compiler-0.1.1.crate"
+ value: "https://static.crates.io/crates/pdl-compiler/pdl-compiler-0.1.2.crate"
}
- version: "0.1.1"
+ version: "0.1.2"
license_type: NOTICE
last_upgrade_date {
year: 2023
- month: 5
- day: 24
+ month: 6
+ day: 20
}
}
diff --git a/cargo2android.json b/cargo2android.json
index 83b0358..46d3c1a 100644
--- a/cargo2android.json
+++ b/cargo2android.json
@@ -1,4 +1,5 @@
{
"run": true,
+ "host-first-multilib": true,
"add-toplevel-block": "cargo2android_toplevel.bp"
}
diff --git a/cargo2android_toplevel.bp b/cargo2android_toplevel.bp
index dbe38f4..57fac63 100644
--- a/cargo2android_toplevel.bp
+++ b/cargo2android_toplevel.bp
@@ -121,6 +121,7 @@ rust_test_host {
"libargh",
"libcodespan_reporting",
"libheck",
+ "libpdl_compiler",
"libpest",
"libprettyplease",
"libproc_macro2",
@@ -297,7 +298,7 @@ genrule {
"tests/canonical/le_test_file.pdl",
],
out: [
- "le_pdl_test.py",
+ "le_backend.py",
],
}
@@ -317,7 +318,7 @@ genrule {
":pdl_be_test_file",
],
out: [
- "be_pdl_test.py",
+ "be_backend.py",
],
}
diff --git a/examples/jpeg.pdl b/examples/jpeg.pdl
new file mode 100644
index 0000000..75cde23
--- /dev/null
+++ b/examples/jpeg.pdl
@@ -0,0 +1,88 @@
+// Grammar file for the Jpeg File Format.
+// https://www.w3.org/Graphics/JPEG/jfif3.pdf
+
+big_endian_packets
+
+enum MarkerType : 8 {
+ NUL = 0x00, // JPEG reserved
+ TEM = 0x01, // temporary marker for arithmetic coding
+
+ // frame types
+ SOF0 = 0xc0, // start of frame (baseline jpeg)
+ SOF1 = 0xc1, // start of frame (extended sequential, huffman)
+ SOF2 = 0xc2, // start of frame (progressive, huffman)
+ SOF3 = 0xc3, // start of frame (lossless, huffman) libjpeg-unsupported
+ SOF5 = 0xc5, // start of frame (differential sequential, huffman) libjpeg-unsupported
+ SOF6 = 0xc6, // start of frame (differential progressive, huffman) libjpeg-unsupported
+ SOF7 = 0xc7, // start of frame (differential lossless, huffman) libjpeg-unsupported
+ SOF9 = 0xc9, // start of frame (extended sequential, arithmetic)
+ SOF10 = 0xca, // start of frame (progressive, arithmetic)
+ SOF11 = 0xcb, // start of frame (lossless, arithmetic) libjpeg-unsupported
+ SOF13 = 0xcd, // start of frame (differential sequential, arithmetic) libjpeg-unsupported
+ SOF14 = 0xce, // start of frame (differential progressive, arithmetic) libjpeg-unsupported
+ SOF15 = 0xcf, // start of frame (differential lossless, arithmetic) libjpeg-unsupported
+
+ DHT = 0xc4, // define huffman tables
+ JPG = 0xc8, // reserved for JPEG extension libjpeg-unsupported
+ DAC = 0xcc, // define arithmetic coding conditioning libjpeg-skipped
+
+ // restart markers (parameterless), only in scans data
+ RST = 0xd0..0xd7,
+
+ // delimiters
+ SOI = 0xd8, // start of image (parameterless)
+ EOI = 0xd9, // end of image (parameterless)
+ SOS = 0xda, // start of scan
+ DQT = 0xdb, // define quantization table(s)
+ DNL = 0xdc, // define number of lines # libjpeg-skipped
+ DRI = 0xdd, // define restart interval
+ DHP = 0xde, // define hierarchical progression
+ EXP = 0xdf, // expand reference components
+ COM = 0xfe, // extension data (comment)
+
+ // application segments
+ APP0 = 0xe0, // application segment 0 (JFIF (len >=14) / JFXX (len >= 6) / AVI MJPEG)
+ APP1 = 0xe1, // application segment 1 (EXIF/XMP/XAP ?)
+ APP2 = 0xe2, // application segment 2 (FlashPix / ICC)
+ APP3 = 0xe3, // application segment 3 (Kodak/...)
+ APP4 = 0xe4, // application segment 4 (FlashPix/...)
+ APP5 = 0xe5, // application segment 5 (Ricoh...)
+ APP6 = 0xe6, // application segment 6 (GoPro...)
+ APP7 = 0xe7, // application segment 7 (Pentax/Qualcomm)
+ APP8 = 0xe8, // application segment 8 (Spiff)
+ APP9 = 0xe9, // application segment 9 (MediaJukebox)
+ APP10 = 0xea, // application segment 10 (PhotoStudio)
+ APP11 = 0xeb, // application segment 11 (HDR)
+ APP12 = 0xec, // application segment 12 (photoshoP ducky / savE foR web)
+ APP13 = 0xed, // application segment 13 (photoshoP savE As)
+ APP14 = 0xee, // application segment 14 ("adobe" (length = 12))
+ APP15 = 0xef, // application segment 15 (GraphicConverter)
+}
+
+struct Marker {
+ _fixed_ = 0xff : 8,
+ type: MarkerType,
+ _payload_,
+}
+
+struct Segment: Marker {
+ _size_(_payload_): 16,
+ _payload_ [+2],
+}
+
+struct StartOfImage : Marker(type = SOI) {}
+struct EndOfImage : Marker(type = EOI) {}
+
+packet Image {
+ start: StartOfImage,
+ segments: Segment[],
+ // The payload contains the Entropy-Coded Segment, which does not follow
+ // the length-prefixed segment convention above despite the similar name,
+ // and represents most of the file's data. Its length is neither known in
+ // advance nor encoded in the file. The only way to find it is to decode
+ // the segment or to fast-forward over it: scan forward for a FF byte.
+ // If it starts a restart marker (FF followed by D0 - D7) or is a stuffed
+ // data byte (FF followed by 00), continue scanning; see the sketch after
+ // this grammar.
+ _payload_,
+ end: EndOfImage,
+}
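
The scan described in the Image comment is mechanical; a minimal sketch in C++, assuming the input is the raw byte range following the last parsed segment (the helper name is hypothetical, not generated code):

#include <cstddef>
#include <cstdint>
#include <optional>
#include <vector>

// Length of the Entropy-Coded Segment: the first 0xff that is neither a
// stuffed data byte (FF 00) nor a restart marker (FF D0..D7) starts the
// next real marker and therefore ends the ECS.
std::optional<size_t> EcsLength(const std::vector<uint8_t>& data) {
  size_t i = 0;
  while (i + 1 < data.size()) {
    if (data[i] != 0xff) {
      i += 1;
    } else if (data[i + 1] == 0x00 ||
               (data[i + 1] >= 0xd0 && data[i + 1] <= 0xd7)) {
      i += 2;  // still inside the ECS, keep scanning
    } else {
      return i;  // a real marker follows
    }
  }
  return std::nullopt;  // no terminating marker found
}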
diff --git a/examples/pcap.pdl b/examples/pcap.pdl
new file mode 100644
index 0000000..a40c0b0
--- /dev/null
+++ b/examples/pcap.pdl
@@ -0,0 +1,27 @@
+// Grammar file for the Libpcap File Format.
+// https://wiki.wireshark.org/Development/LibpcapFileFormat
+
+little_endian_packets
+
+struct PcapHeader {
+ _fixed_ = 0xa1b2c3d4: 32, /* magic number */
+ version_major: 16,
+ version_minor: 16,
+ thiszone: 32, /* GMT to local correction */
+ sigfigs: 32, /* accuracy of timestamps */
+ snaplen: 32, /* max length of captured packets, in octets */
+ network: 32, /* data link type */
+}
+
+struct PcapRecord {
+ ts_sec: 32, /* timestamp seconds */
+ ts_usec: 32, /* timestamp microseconds */
+ _size_(_payload_): 32, /* number of octets of packet saved in file */
+ orig_len: 32, /* actual length of packet */
+ _payload_, /* packet octets */
+}
+
+packet PcapFile {
+ header: PcapHeader,
+ records: PcapRecord[],
+}
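
The fixed 24-byte header above decodes with plain little-endian reads; a hand-rolled C++ sketch, independent of any generated backend. Offsets follow the field order: magic at 0, version_major at 4, version_minor at 6, thiszone at 8, sigfigs at 12, snaplen at 16, network at 20.

#include <cstddef>
#include <cstdint>

bool ReadPcapSnaplen(const uint8_t* b, size_t len, uint32_t* snaplen) {
  auto rd32 = [&](size_t off) {
    return uint32_t{b[off]} | uint32_t{b[off + 1]} << 8 |
           uint32_t{b[off + 2]} << 16 | uint32_t{b[off + 3]} << 24;
  };
  if (len < 24 || rd32(0) != 0xa1b2c3d4) {
    return false;  // truncated header or wrong magic number
  }
  *snaplen = rd32(16);
  return true;
}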
diff --git a/scripts/generate_cxx_backend.py b/scripts/generate_cxx_backend.py
index a728d13..0c8b9e3 100755
--- a/scripts/generate_cxx_backend.py
+++ b/scripts/generate_cxx_backend.py
@@ -278,8 +278,36 @@ class FieldParser:
f"pdl::packet::slice remaining_span = span.subrange({padded_size}, span.size() - {padded_size});")
self.append_(f"span = span.subrange(0, {padded_size});")
+ # The array count is known statically, elements are scalar.
+ if field.width and field.size:
+ assert size is not None
+ self.check_size_(size)
+ element_size = int(field.width / 8)
+ self.append_(f"for (size_t n = 0; n < {field.size}; n++) {{")
+ self.append_(f" {field.id}_[n] = span.read_{self.byteorder}<{element_type}, {element_size}>();")
+ self.append_("}")
+
+ # The array count is known statically, elements are enum values.
+ elif isinstance(field.type, ast.EnumDeclaration) and field.size:
+ assert size is not None
+ self.check_size_(size)
+ element_size = int(field.type.width / 8)
+ backing_type = get_cxx_scalar_type(field.type.width)
+ self.append_(f"for (size_t n = 0; n < {field.size}; n++) {{")
+ self.append_(
+ f" {field.id}_[n] = {element_type}(span.read_{self.byteorder}<{backing_type}, {element_size}>());")
+ self.append_("}")
+
+ # The array count is known statically, elements have variable size.
+ elif field.size:
+ self.append_(f"for (size_t n = 0; n < {field.size}; n++) {{")
+ self.append_(f" if (!{element_type}::Parse(span, &{field.id}_[n])) {{")
+ self.append_(" return false;")
+ self.append_(" }")
+ self.append_("}")
+
# The array size is known in bytes.
- if size is not None:
+ elif size is not None:
self.check_size_(size)
self.append_("{")
self.append_(f"pdl::packet::slice temp_span = span.subrange(0, {size});")
@@ -715,25 +743,48 @@ def generate_enum_to_text(decl: ast.EnumDeclaration) -> str:
""").format(enum_name=enum_name, tag_cases=indent(tag_cases, 2))
-def generate_packet_field_members(decl: ast.Declaration, view: bool) -> List[str]:
+def generate_packet_view_field_members(decl: ast.Declaration) -> List[str]:
"""Return the declaration of fields that are backed in the view
class declaration.
Backed fields include all named fields that do not have a constrained
value in the selected declaration and its parents.
- :param decl: target declaration
- :param view: if true the payload and array fields are generated as slices"""
+ :param decl: target declaration"""
fields = core.get_unconstrained_parent_fields(decl) + decl.fields
members = []
for field in fields:
- if isinstance(field, (ast.PayloadField, ast.BodyField)) and view:
+ if isinstance(field, (ast.PayloadField, ast.BodyField)):
members.append("pdl::packet::slice payload_;")
- elif isinstance(field, (ast.PayloadField, ast.BodyField)):
- members.append("std::vector<uint8_t> payload_;")
- elif isinstance(field, ast.ArrayField) and view:
+ elif isinstance(field, ast.ArrayField):
members.append(f"pdl::packet::slice {field.id}_;")
+ elif isinstance(field, ast.ScalarField):
+ members.append(f"{get_cxx_scalar_type(field.width)} {field.id}_{{0}};")
+ elif isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration):
+ members.append(f"{field.type_id} {field.id}_{{{field.type_id}::{field.type.tags[0].id}}};")
+ elif isinstance(field, ast.TypedefField):
+ members.append(f"{field.type_id} {field.id}_;")
+
+ return members
+
+
+def generate_packet_field_members(decl: ast.Declaration) -> List[str]:
+ """Return the declaration of fields that are backed in the view
+ class declaration.
+
+ Backed fields include all named fields that do not have a constrained
+ value in the selected declaration and its parents.
+
+ :param decl: target declaration"""
+
+ members = []
+ for field in decl.fields:
+ if isinstance(field, (ast.PayloadField, ast.BodyField)) and not decl.parent:
+ members.append("std::vector<uint8_t> payload_;")
+ elif isinstance(field, ast.ArrayField) and field.size:
+ element_type = field.type_id or get_cxx_scalar_type(field.width)
+ members.append(f"std::array<{element_type}, {field.size}> {field.id}_;")
elif isinstance(field, ast.ArrayField):
element_type = field.type_id or get_cxx_scalar_type(field.width)
members.append(f"std::vector<{element_type}> {field.id}_;")
@@ -774,16 +825,29 @@ def generate_scalar_array_field_accessor(field: ast.ArrayField) -> str:
element_size = int(field.width / 8)
backing_type = get_cxx_scalar_type(field.width)
byteorder = field.parent.file.byteorder_short
- return dedent("""\
- pdl::packet::slice span = {field_id}_;
- std::vector<{backing_type}> elements;
- while (span.size() >= {element_size}) {{
- elements.push_back(span.read_{byteorder}<{backing_type}, {element_size}>());
- }}
- return elements;""").format(field_id=field.id,
- backing_type=backing_type,
- element_size=element_size,
- byteorder=byteorder)
+ if field.size:
+ return dedent("""\
+ pdl::packet::slice span = {field_id}_;
+ std::array<{backing_type}, {array_size}> elements;
+ for (int n = 0; n < {array_size}; n++) {{
+ elements[n] = span.read_{byteorder}<{backing_type}, {element_size}>();
+ }}
+ return elements;""").format(field_id=field.id,
+ backing_type=backing_type,
+ element_size=element_size,
+ array_size=field.size,
+ byteorder=byteorder)
+ else:
+ return dedent("""\
+ pdl::packet::slice span = {field_id}_;
+ std::vector<{backing_type}> elements;
+ while (span.size() >= {element_size}) {{
+ elements.push_back(span.read_{byteorder}<{backing_type}, {element_size}>());
+ }}
+ return elements;""").format(field_id=field.id,
+ backing_type=backing_type,
+ element_size=element_size,
+ byteorder=byteorder)
def generate_enum_array_field_accessor(field: ast.ArrayField) -> str:
@@ -791,32 +855,55 @@ def generate_enum_array_field_accessor(field: ast.ArrayField) -> str:
element_size = int(field.type.width / 8)
backing_type = get_cxx_scalar_type(field.type.width)
byteorder = field.parent.file.byteorder_short
- return dedent("""\
- pdl::packet::slice span = {field_id}_;
- std::vector<{enum_type}> elements;
- while (span.size() >= {element_size}) {{
- elements.push_back({enum_type}(span.read_{byteorder}<{backing_type}, {element_size}>()));
- }}
- return elements;""").format(field_id=field.id,
- enum_type=field.type_id,
- backing_type=backing_type,
- element_size=element_size,
- byteorder=byteorder)
+ if field.size:
+ return dedent("""\
+ pdl::packet::slice span = {field_id}_;
+ std::array<{enum_type}, {array_size}> elements;
+ for (int n = 0; n < {array_size}; n++) {{
+ elements[n] = {enum_type}(span.read_{byteorder}<{backing_type}, {element_size}>());
+ }}
+ return elements;""").format(field_id=field.id,
+ enum_type=field.type.id,
+ backing_type=backing_type,
+ element_size=element_size,
+ array_size=field.size,
+ byteorder=byteorder)
+ else:
+ return dedent("""\
+ pdl::packet::slice span = {field_id}_;
+ std::vector<{enum_type}> elements;
+ while (span.size() >= {element_size}) {{
+ elements.push_back({enum_type}(span.read_{byteorder}<{backing_type}, {element_size}>()));
+ }}
+ return elements;""").format(field_id=field.id,
+ enum_type=field.type_id,
+ backing_type=backing_type,
+ element_size=element_size,
+ byteorder=byteorder)
def generate_typedef_array_field_accessor(field: ast.ArrayField) -> str:
"""Parse the selected typedef array field."""
- return dedent("""\
- pdl::packet::slice span = {field_id}_;
- std::vector<{struct_type}> elements;
- for (;;) {{
- {struct_type} element;
- if (!{struct_type}::Parse(span, &element)) {{
- break;
+ if field.size:
+ return dedent("""\
+ pdl::packet::slice span = {field_id}_;
+ std::array<{struct_type}, {array_size}> elements;
+ for (int n = 0; n < {array_size}; n++) {{
+ {struct_type}::Parse(span, &elements[n]);
}}
- elements.emplace_back(std::move(element));
- }}
- return elements;""").format(field_id=field.id, struct_type=field.type_id)
+ return elements;""").format(field_id=field.id, struct_type=field.type_id, array_size=field.size)
+ else:
+ return dedent("""\
+ pdl::packet::slice span = {field_id}_;
+ std::vector<{struct_type}> elements;
+ for (;;) {{
+ {struct_type} element;
+ if (!{struct_type}::Parse(span, &element)) {{
+ break;
+ }}
+ elements.emplace_back(std::move(element));
+ }}
+ return elements;""").format(field_id=field.id, struct_type=field.type_id)
def generate_array_field_accessor(field: ast.ArrayField):
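
Instantiated for the same `a: 16[4]` field, the static-count template above expands to an accessor body of this shape (illustrative):

// Fixed-count accessor: reads the backing slice into a std::array
// instead of accumulating into a std::vector.
pdl::packet::slice span = a_;
std::array<uint16_t, 4> elements;
for (int n = 0; n < 4; n++) {
  elements[n] = span.read_le<uint16_t, 2>();
}
return elements;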
@@ -923,15 +1010,16 @@ def generate_packet_view_field_accessors(packet: ast.PacketDeclaration) -> List[
"""))
elif isinstance(field, ast.ArrayField):
element_type = field.type_id or get_cxx_scalar_type(field.width)
+ array_type = (f"std::array<{element_type}, {field.size}>" if field.size else f"std::vector<{element_type}>")
accessor_name = to_pascal_case(field.id)
accessors.append(
dedent("""\
- std::vector<{element_type}> Get{accessor_name}() const {{
+ {array_type} Get{accessor_name}() const {{
ASSERT(valid_);
{accessor}
}}
- """).format(element_type=element_type,
+ """).format(array_type=array_type,
accessor_name=accessor_name,
accessor=indent(generate_array_field_accessor(field), 1)))
elif isinstance(field, ast.ScalarField):
@@ -1072,11 +1160,12 @@ def generate_packet_view(packet: ast.PacketDeclaration) -> str:
packet declaration."""
parent_class = f"{packet.parent.id}View" if packet.parent else "pdl::packet::slice"
- field_members = generate_packet_field_members(packet, view=True)
+ field_members = generate_packet_view_field_members(packet)
field_accessors = generate_packet_view_field_accessors(packet)
field_parsers = generate_packet_view_field_parsers(packet)
friend_classes = generate_packet_view_friend_classes(packet)
stringifier = generate_packet_stringifier(packet)
+ bytes_initializer = f"parent.bytes_" if packet.parent else "parent"
return dedent("""\
@@ -1093,8 +1182,13 @@ def generate_packet_view(packet: ast.PacketDeclaration) -> str:
return valid_;
}}
+ pdl::packet::slice bytes() const {{
+ return bytes_;
+ }}
+
protected:
- explicit {packet_name}View({parent_class} const& parent) {{
+ explicit {packet_name}View({parent_class} const& parent)
+ : bytes_({bytes_initializer}) {{
valid_ = Parse(parent);
}}
@@ -1103,12 +1197,14 @@ def generate_packet_view(packet: ast.PacketDeclaration) -> str:
}}
bool valid_{{false}};
+ pdl::packet::slice bytes_;
{field_members}
{friend_classes}
}};
""").format(packet_name=packet.id,
parent_class=parent_class,
+ bytes_initializer=bytes_initializer,
field_accessors=indent(field_accessors, 1),
field_members=indent(field_members, 1),
field_parsers=indent(field_parsers, 2),
@@ -1122,12 +1218,36 @@ def generate_packet_constructor(struct: ast.StructDeclaration, constructor_name:
constructor_params = []
constructor_initializers = []
- fields = core.get_unconstrained_parent_fields(struct) + struct.fields
+ inherited_fields = core.get_unconstrained_parent_fields(struct)
+ payload_initializer = ''
+ parent_initializer = []
- for field in fields:
+ for field in inherited_fields:
+ if isinstance(field, ast.ArrayField) and field.size:
+ element_type = field.type_id or get_cxx_scalar_type(field.width)
+ constructor_params.append(f"std::array<{element_type}, {field.size}> {field.id}")
+ elif isinstance(field, ast.ArrayField):
+ element_type = field.type_id or get_cxx_scalar_type(field.width)
+ constructor_params.append(f"std::vector<{element_type}> {field.id}")
+ elif isinstance(field, ast.ScalarField):
+ backing_type = get_cxx_scalar_type(field.width)
+ constructor_params.append(f"{backing_type} {field.id}")
+ elif (isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration)):
+ constructor_params.append(f"{field.type_id} {field.id}")
+ elif isinstance(field, ast.TypedefField):
+ constructor_params.append(f"{field.type_id} {field.id}")
+
+ for field in struct.fields:
if isinstance(field, (ast.PayloadField, ast.BodyField)):
constructor_params.append("std::vector<uint8_t> payload")
- constructor_initializers.append("payload_(std::move(payload))")
+ if struct.parent:
+ payload_initializer = f"payload_ = std::move(payload);"
+ else:
+ constructor_initializers.append("payload_(std::move(payload))")
+ elif isinstance(field, ast.ArrayField) and field.size:
+ element_type = field.type_id or get_cxx_scalar_type(field.width)
+ constructor_params.append(f"std::array<{element_type}, {field.size}> {field.id}")
+ constructor_initializers.append(f"{field.id}_(std::move({field.id}))")
elif isinstance(field, ast.ArrayField):
element_type = field.type_id or get_cxx_scalar_type(field.width)
constructor_params.append(f"std::vector<{element_type}> {field.id}")
@@ -1146,16 +1266,84 @@ def generate_packet_constructor(struct: ast.StructDeclaration, constructor_name:
if not constructor_params:
return ""
+ if struct.parent:
+ fields = core.get_unconstrained_parent_fields(struct.parent) + struct.parent.fields
+ parent_constructor_params = []
+ for field in fields:
+ constraints = [c for c in struct.constraints if c.id == getattr(field, 'id', None)]
+ if isinstance(field, (ast.PayloadField, ast.BodyField)):
+ parent_constructor_params.append("std::vector<uint8_t>{}")
+ elif isinstance(field, ast.ArrayField):
+ parent_constructor_params.append(f"std::move({field.id})")
+ elif isinstance(field, ast.ScalarField) and constraints:
+ parent_constructor_params.append(f"{constraints[0].value}")
+ elif isinstance(field, ast.ScalarField):
+ parent_constructor_params.append(f"{field.id}")
+ elif (isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration) and constraints):
+ parent_constructor_params.append(f"{field.type_id}::{constraints[0].tag_id}")
+ elif (isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration)):
+ parent_constructor_params.append(f"{field.id}")
+ elif isinstance(field, ast.TypedefField):
+ parent_constructor_params.append(f"std::move({field.id})")
+ parent_constructor_params = ', '.join(parent_constructor_params)
+ parent_initializer = [f"{struct.parent_id}Builder({parent_constructor_params})"]
+
explicit = 'explicit ' if len(constructor_params) == 1 else ''
constructor_params = ', '.join(constructor_params)
- constructor_initializers = ', '.join(constructor_initializers)
+ constructor_initializers = ', '.join(parent_initializer + constructor_initializers)
return dedent("""\
{explicit}{constructor_name}({constructor_params})
- : {constructor_initializers} {{}}""").format(explicit=explicit,
- constructor_name=constructor_name,
- constructor_params=constructor_params,
- constructor_initializers=constructor_initializers)
+ : {constructor_initializers} {{
+ {payload_initializer}
+ }}""").format(explicit=explicit,
+ constructor_name=constructor_name,
+ constructor_params=constructor_params,
+ payload_initializer=payload_initializer,
+ constructor_initializers=constructor_initializers)
+
+
+def generate_packet_creator(packet: ast.PacketDeclaration) -> str:
+ """Generate the implementation of the creator for a
+ packet declaration."""
+
+ constructor_name = f"{packet.id}Builder"
+ creator_params = []
+ constructor_params = []
+ fields = core.get_unconstrained_parent_fields(packet) + packet.fields
+
+ for field in fields:
+ if isinstance(field, (ast.PayloadField, ast.BodyField)):
+ creator_params.append("std::vector<uint8_t> payload")
+ constructor_params.append("std::move(payload)")
+ elif isinstance(field, ast.ArrayField) and field.size:
+ element_type = field.type_id or get_cxx_scalar_type(field.width)
+ creator_params.append(f"std::array<{element_type}, {field.size}> {field.id}")
+ constructor_params.append(f"std::move({field.id})")
+ elif isinstance(field, ast.ArrayField):
+ element_type = field.type_id or get_cxx_scalar_type(field.width)
+ creator_params.append(f"std::vector<{element_type}> {field.id}")
+ constructor_params.append(f"std::move({field.id})")
+ elif isinstance(field, ast.ScalarField):
+ backing_type = get_cxx_scalar_type(field.width)
+ creator_params.append(f"{backing_type} {field.id}")
+ constructor_params.append(f"{field.id}")
+ elif (isinstance(field, ast.TypedefField) and isinstance(field.type, ast.EnumDeclaration)):
+ creator_params.append(f"{field.type_id} {field.id}")
+ constructor_params.append(f"{field.id}")
+ elif isinstance(field, ast.TypedefField):
+ creator_params.append(f"{field.type_id} {field.id}")
+ constructor_params.append(f"std::move({field.id})")
+
+ creator_params = ', '.join(creator_params)
+ constructor_params = ', '.join(constructor_params)
+
+ return dedent("""\
+ static std::unique_ptr<{constructor_name}> Create({creator_params}) {{
+ return std::make_unique<{constructor_name}>({constructor_params});
+ }}""").format(constructor_name=constructor_name,
+ creator_params=creator_params,
+ constructor_params=constructor_params)
def generate_packet_builder(packet: ast.PacketDeclaration) -> str:
@@ -1163,15 +1351,17 @@ def generate_packet_builder(packet: ast.PacketDeclaration) -> str:
packet declaration."""
class_name = f'{packet.id}Builder'
+ parent_class = f'{packet.parent_id}Builder' if packet.parent_id else "pdl::packet::Builder"
builder_constructor = generate_packet_constructor(packet, constructor_name=class_name)
- field_members = generate_packet_field_members(packet, view=False)
+ builder_creator = generate_packet_creator(packet)
+ field_members = generate_packet_field_members(packet)
field_serializers = generate_packet_field_serializers(packet)
size_getter = generate_packet_size_getter(packet)
array_field_size_getters = generate_array_field_size_getters(packet)
return dedent("""\
- class {class_name} : public pdl::packet::Builder {{
+ class {class_name} : public {parent_class} {{
public:
~{class_name}() override = default;
{class_name}() = default;
@@ -1179,6 +1369,7 @@ def generate_packet_builder(packet: ast.PacketDeclaration) -> str:
{class_name}({class_name}&&) = default;
{class_name}& operator=({class_name} const&) = default;
{builder_constructor}
+ {builder_creator}
void Serialize(std::vector<uint8_t>& output) const override {{
{field_serializers}
@@ -1192,7 +1383,9 @@ def generate_packet_builder(packet: ast.PacketDeclaration) -> str:
{field_members}
}};
""").format(class_name=f'{packet.id}Builder',
+ parent_class=parent_class,
builder_constructor=builder_constructor,
+ builder_creator=builder_creator,
field_members=indent(field_members, 1),
field_serializers=indent(field_serializers, 2),
size_getter=indent(size_getter, 1),
@@ -1211,6 +1404,10 @@ def generate_struct_field_parsers(struct: ast.StructDeclaration) -> str:
if isinstance(field, (ast.PayloadField, ast.BodyField)):
code.append("std::vector<uint8_t> payload_;")
parsed_fields.append("std::move(payload_)")
+ elif isinstance(field, ast.ArrayField) and field.size:
+ element_type = field.type_id or get_cxx_scalar_type(field.width)
+ code.append(f"std::array<{element_type}, {field.size}> {field.id}_;")
+ parsed_fields.append(f"std::move({field.id}_)")
elif isinstance(field, ast.ArrayField):
element_type = field.type_id or get_cxx_scalar_type(field.width)
code.append(f"std::vector<{element_type}> {field.id}_;")
@@ -1250,7 +1447,7 @@ def generate_struct_declaration(struct: ast.StructDeclaration) -> str:
raise Exception("Struct declaration with parents are not supported")
struct_constructor = generate_packet_constructor(struct, constructor_name=struct.id)
- field_members = generate_packet_field_members(struct, view=False)
+ field_members = generate_packet_field_members(struct)
field_parsers = generate_struct_field_parsers(struct)
field_serializers = generate_packet_field_serializers(struct)
size_getter = generate_packet_size_getter(struct)
diff --git a/scripts/generate_cxx_backend_tests.py b/scripts/generate_cxx_backend_tests.py
index 1f90600..123b08c 100755
--- a/scripts/generate_cxx_backend_tests.py
+++ b/scripts/generate_cxx_backend_tests.py
@@ -80,6 +80,14 @@ def generate_packet_parser_test(parser_test_suite: str, packet: ast.PacketDeclar
checks.append("};")
checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, expected_{field_var});")
+ elif isinstance(field, ast.ArrayField) and field.size and field.width:
+ checks.append(f"std::array<{get_cxx_scalar_type(field.width)}, {field.size}> expected_{field_var} {{")
+ step = int(16 * 8 / field.width)
+ for i in range(0, len(value), step):
+ checks.append(' ' + ' '.join([f"0x{v:x}," for v in value[i:i + step]]))
+ checks.append("};")
+ checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, expected_{field_var});")
+
elif isinstance(field, ast.ArrayField) and field.width:
checks.append(f"std::vector<{get_cxx_scalar_type(field.width)}> expected_{field_var} {{")
step = int(16 * 8 / field.width)
@@ -88,6 +96,13 @@ def generate_packet_parser_test(parser_test_suite: str, packet: ast.PacketDeclar
checks.append("};")
checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, expected_{field_var});")
+ elif (isinstance(field, ast.ArrayField) and field.size and isinstance(field.type, ast.EnumDeclaration)):
+ checks.append(f"std::array<{field.type_id}, {field.size}> expected_{field_var} {{")
+ for v in value:
+ checks.append(f" {field.type_id}({v}),")
+ checks.append("};")
+ checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, expected_{field_var});")
+
elif (isinstance(field, ast.ArrayField) and isinstance(field.type, ast.EnumDeclaration)):
checks.append(f"std::vector<{field.type_id}> expected_{field_var} {{")
for v in value:
@@ -95,6 +110,12 @@ def generate_packet_parser_test(parser_test_suite: str, packet: ast.PacketDeclar
checks.append("};")
checks.append(f"ASSERT_EQ({get_field(decl, var, id)}, expected_{field_var});")
+ elif isinstance(field, ast.ArrayField) and field.size:
+ checks.append(f"std::array<{field.type_id}, {field.size}> {field_var} = {get_field(decl, var, id)};")
+ checks.append(f"ASSERT_EQ({field_var}.size(), {len(value)});")
+ for (n, value) in enumerate(value):
+ checks.extend(check_members(field.type, f"{field_var}[{n}]", value))
+
elif isinstance(field, ast.ArrayField):
checks.append(f"std::vector<{field.type_id}> {field_var} = {get_field(decl, var, id)};")
checks.append(f"ASSERT_EQ({field_var}.size(), {len(value)});")
@@ -166,6 +187,14 @@ def generate_packet_serializer_test(serializer_test_suite: str, packet: ast.Pack
declarations.append("};")
parameters.append(f"std::move({field_var})")
+ elif isinstance(field, ast.ArrayField) and field.size and field.width:
+ declarations.append(f"std::array<{get_cxx_scalar_type(field.width)}, {field.size}> {field_var} {{")
+ step = int(16 * 8 / field.width)
+ for i in range(0, len(value), step):
+ declarations.append(' ' + ' '.join([f"0x{v:x}," for v in value[i:i + step]]))
+ declarations.append("};")
+ parameters.append(f"std::move({field_var})")
+
elif isinstance(field, ast.ArrayField) and field.width:
declarations.append(f"std::vector<{get_cxx_scalar_type(field.width)}> {field_var} {{")
step = int(16 * 8 / field.width)
@@ -174,6 +203,13 @@ def generate_packet_serializer_test(serializer_test_suite: str, packet: ast.Pack
declarations.append("};")
parameters.append(f"std::move({field_var})")
+ elif isinstance(field, ast.ArrayField) and field.size and isinstance(field.type, ast.EnumDeclaration):
+ declarations.append(f"std::array<{field.type_id}, {field.size}> {field_var} {{")
+ for v in value:
+ declarations.append(f" {field.type_id}({v}),")
+ declarations.append("};")
+ parameters.append(f"std::move({field_var})")
+
elif isinstance(field, ast.ArrayField) and isinstance(field.type, ast.EnumDeclaration):
declarations.append(f"std::vector<{field.type_id}> {field_var} {{")
for v in value:
@@ -181,6 +217,18 @@ def generate_packet_serializer_test(serializer_test_suite: str, packet: ast.Pack
declarations.append("};")
parameters.append(f"std::move({field_var})")
+ elif isinstance(field, ast.ArrayField) and field.size:
+ elements = []
+ for (n, value) in enumerate(value):
+ (element, intermediate_declarations) = build_packet(field.type, f'{field_var}_{n}', value)
+ elements.append(element)
+ declarations.extend(intermediate_declarations)
+ declarations.append(f"std::array<{field.type_id}, {field.size}> {field_var} {{")
+ for element in elements:
+ declarations.append(f" {element},")
+ declarations.append("};")
+ parameters.append(f"std::move({field_var})")
+
elif isinstance(field, ast.ArrayField):
elements = []
for (n, value) in enumerate(value):
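
The new branches emit fixed-size array checks; for a field `a: 8[4]` with test vector [1, 2, 3, 4], the emitted parser test contains roughly (illustrative):

std::array<uint8_t, 4> expected_a {
  0x1, 0x2, 0x3, 0x4,
};
ASSERT_EQ(view.GetA(), expected_a);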
diff --git a/scripts/packet_runtime.h b/scripts/packet_runtime.h
index c9e1420..4a76383 100644
--- a/scripts/packet_runtime.h
+++ b/scripts/packet_runtime.h
@@ -16,16 +16,12 @@
#pragma once
+#include <cassert>
#include <cstdint>
#include <memory>
#include <utility>
#include <vector>
-#ifndef ASSERT
-#include <cassert>
-#define ASSERT assert
-#endif // !ASSERT
-
namespace pdl::packet {
/// Representation of a raw packet slice.
@@ -46,7 +42,7 @@ class slice {
/// current slice. The range ['offset', 'offset' + 'size') must be
/// contained within the bounds of the current slice.
slice subrange(size_t offset, size_t size) const {
- ASSERT((offset + size) <= size_);
+ assert((offset + size) <= size_);
return slice(packet_, offset_ + offset, size);
}
@@ -57,7 +53,7 @@ class slice {
template <typename T, size_t N = sizeof(T)>
T read_le() {
static_assert(N <= sizeof(T));
- ASSERT(N <= size_);
+ assert(N <= size_);
T value = 0;
for (size_t n = 0; n < N; n++) {
value |= (T)at(n) << (8 * n);
@@ -73,7 +69,7 @@ class slice {
template <typename T, size_t N = sizeof(T)>
T read_be() {
static_assert(N <= sizeof(T));
- ASSERT(N <= size_);
+ assert(N <= size_);
T value = 0;
for (size_t n = 0; n < N; n++) {
value = (value << 8) | (T)at(n);
@@ -85,14 +81,14 @@ class slice {
/// Return the value of the byte at the given offset.
/// `offset` must be within the bounds of the slice.
uint8_t at(size_t offset) const {
- ASSERT(offset <= size_);
+ assert(offset <= size_);
return packet_->at(offset_ + offset);
}
/// Skip `size` bytes at the front of the slice.
/// `size` must be lower than or equal to the slice size.
void skip(size_t size) {
- ASSERT(size <= size_);
+ assert(size <= size_);
offset_ += size;
size_ -= size;
}
@@ -109,6 +105,13 @@ class slice {
packet_->cbegin() + offset_ + size_);
}
+ bool operator==(slice const& other) const {
+ return size_ == other.size_ &&
+ std::equal(packet_->begin() + offset_,
+ packet_->begin() + offset_ + size_,
+ other.packet_->begin());
+ }
+
private:
std::shared_ptr<const std::vector<uint8_t>> packet_;
size_t offset_{0};
@@ -147,7 +150,7 @@ class Builder {
}
/// Helper method to serialize the packet to a byte vector.
- std::vector<uint8_t> Serialize() const {
+ virtual std::vector<uint8_t> Serialize() const {
std::vector<uint8_t> output;
Serialize(output);
return output;
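
A usage sketch for the slice API above, relying only on the three-argument constructor visible in subrange() and assuming the reads behave as shown:

#include <cassert>
#include <cstdint>
#include <memory>
#include <vector>
#include "packet_runtime.h"

void Demo() {
  auto bytes = std::make_shared<const std::vector<uint8_t>>(
      std::vector<uint8_t>{0x34, 0x12, 0xab, 0xcd});
  pdl::packet::slice s(bytes, 0, bytes->size());
  // Same two bytes, opposite byte orders.
  uint16_t le = s.subrange(0, 2).read_le<uint16_t>();  // 0x1234
  uint16_t be = s.subrange(0, 2).read_be<uint16_t>();  // 0x3412
  assert(le == 0x1234 && be == 0x3412);
}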
diff --git a/src/analyzer.rs b/src/analyzer.rs
index 733d4ef..1757e5e 100644
--- a/src/analyzer.rs
+++ b/src/analyzer.rs
@@ -26,7 +26,7 @@ pub mod ast {
use serde::Serialize;
/// Field and declaration size information.
- #[derive(Debug, Clone, Copy)]
+ #[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[allow(unused)]
pub enum Size {
/// Constant size in bits.
@@ -50,7 +50,7 @@ pub mod ast {
#[derive(Debug, Serialize, Default, Clone, PartialEq)]
pub struct Annotation;
- #[derive(Default, Debug, Clone)]
+ #[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct FieldAnnotation {
// Size of field.
pub size: Size,
@@ -59,7 +59,7 @@ pub mod ast {
pub padded_size: Option<usize>,
}
- #[derive(Default, Debug, Clone)]
+ #[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct DeclAnnotation {
// Size computed excluding the payload.
pub size: Size,
@@ -68,12 +68,6 @@ pub mod ast {
pub payload_size: Size,
}
- impl FieldAnnotation {
- pub fn new(size: Size) -> Self {
- FieldAnnotation { size, padded_size: None }
- }
- }
-
impl std::ops::Add for Size {
type Output = Size;
fn add(self, rhs: Size) -> Self::Output {
@@ -107,6 +101,36 @@ pub mod ast {
}
}
+ impl Size {
+ // Returns the width if the size is static.
+ pub fn static_(&self) -> Option<usize> {
+ match self {
+ Size::Static(size) => Some(*size),
+ Size::Dynamic | Size::Unknown => None,
+ }
+ }
+ }
+
+ impl DeclAnnotation {
+ pub fn total_size(&self) -> Size {
+ self.size + self.payload_size
+ }
+ }
+
+ impl FieldAnnotation {
+ pub fn new(size: Size) -> Self {
+ FieldAnnotation { size, padded_size: None }
+ }
+
+ // Returns the field width or padded width if static.
+ pub fn static_(&self) -> Option<usize> {
+ match self.padded_size {
+ Some(padding) => Some(8 * padding),
+ None => self.size.static_(),
+ }
+ }
+ }
+
impl crate::ast::Annotation for Annotation {
type FieldAnnotation = FieldAnnotation;
type DeclAnnotation = DeclAnnotation;
@@ -1282,8 +1306,19 @@ fn compute_field_sizes(file: &parser_ast::File) -> ast::File {
scope: &HashMap<String, ast::DeclAnnotation>,
) -> ast::Decl {
// Annotate the declaration fields.
+ // Add the padding information to the fields in the same pass.
let mut decl = decl.annotate(Default::default(), |fields| {
- fields.iter().map(|field| annotate_field(decl, field, scope)).collect()
+ let mut fields: Vec<_> =
+ fields.iter().map(|field| annotate_field(decl, field, scope)).collect();
+ let mut padding = None;
+ for field in fields.iter_mut().rev() {
+ field.annot.padded_size = padding;
+ padding = match &field.desc {
+ FieldDesc::Padding { size } => Some(*size),
+ _ => None,
+ };
+ }
+ fields
});
// Compute the declaration annotation.
@@ -1302,22 +1337,30 @@ fn compute_field_sizes(file: &parser_ast::File) -> ast::File {
FieldDesc::Payload { .. } | FieldDesc::Body { .. } => {
payload_size = field.annot.size
}
- _ => size = size + field.annot.size,
+ _ => {
+ size = size
+ + match field.annot.padded_size {
+ Some(padding) => ast::Size::Static(8 * padding),
+ None => field.annot.size,
+ }
+ }
}
}
ast::DeclAnnotation { size, payload_size }
}
DeclDesc::Enum { width, .. }
| DeclDesc::Checksum { width, .. }
- | DeclDesc::CustomField { width: Some(width), .. } => {
- ast::DeclAnnotation { size: ast::Size::Static(*width), ..decl.annot }
- }
+ | DeclDesc::CustomField { width: Some(width), .. } => ast::DeclAnnotation {
+ size: ast::Size::Static(*width),
+ payload_size: ast::Size::Static(0),
+ },
DeclDesc::CustomField { width: None, .. } => {
- ast::DeclAnnotation { size: ast::Size::Dynamic, ..decl.annot }
- }
- DeclDesc::Test { .. } => {
- ast::DeclAnnotation { size: ast::Size::Static(0), ..decl.annot }
+ ast::DeclAnnotation { size: ast::Size::Dynamic, payload_size: ast::Size::Static(0) }
}
+ DeclDesc::Test { .. } => ast::DeclAnnotation {
+ size: ast::Size::Static(0),
+ payload_size: ast::Size::Static(0),
+ },
};
decl
}
@@ -1407,28 +1450,6 @@ fn compute_field_sizes(file: &parser_ast::File) -> ast::File {
}
}
-/// Inline padding fields.
-/// The padding information is added directly to the targeted fields.
-fn inline_paddings(file: &mut ast::File) {
- for decl in file.declarations.iter_mut() {
- match &mut decl.desc {
- DeclDesc::Struct { fields, .. }
- | DeclDesc::Packet { fields, .. }
- | DeclDesc::Group { fields, .. } => {
- let mut padding = None;
- for field in fields.iter_mut().rev() {
- field.annot.padded_size = padding;
- padding = match &field.desc {
- FieldDesc::Padding { size } => Some(*size),
- _ => None,
- };
- }
- }
- _ => (),
- }
- }
-}
-
/// Inline group fields and remove group declarations.
fn inline_groups(file: &mut ast::File) -> Result<(), Diagnostics> {
fn inline_fields<'a>(
@@ -1509,7 +1530,6 @@ pub fn analyze(file: &parser_ast::File) -> Result<ast::File, Diagnostics> {
check_padding_fields(file)?;
check_checksum_fields(file, &scope)?;
let mut file = compute_field_sizes(file);
- inline_paddings(&mut file);
inline_groups(&mut file)?;
Ok(file)
}
@@ -1521,6 +1541,8 @@ mod test {
use crate::parser::parse_inline;
use codespan_reporting::term::termcolor;
+ use googletest::prelude::{assert_that, eq};
+
macro_rules! raises {
($code:ident, $text:literal) => {{
let mut db = SourceDatabase::new();
@@ -2534,6 +2556,521 @@ mod test {
);
}
+ use analyzer::ast::Size;
+ use Size::*;
+
+ #[derive(Debug, PartialEq, Eq)]
+ struct Annotations {
+ size: Size,
+ payload_size: Size,
+ fields: Vec<Size>,
+ }
+
+ fn annotations(text: &str) -> Vec<Annotations> {
+ let mut db = SourceDatabase::new();
+ let file =
+ parse_inline(&mut db, "stdin".to_owned(), text.to_owned()).expect("parsing failure");
+ let file = analyzer::analyze(&file).expect("analyzer failure");
+ file.declarations
+ .iter()
+ .map(|decl| Annotations {
+ size: decl.annot.size,
+ payload_size: decl.annot.payload_size,
+ fields: decl.fields().map(|field| field.annot.size).collect(),
+ })
+ .collect()
+ }
+
+ #[test]
+ fn test_bitfield_annotations() {
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ enum E : 6 { X=0, Y=1 }
+ packet A {
+ a : 14,
+ b : E,
+ _reserved_ : 3,
+ _fixed_ = 3 : 4,
+ _fixed_ = X : E,
+ _size_(_payload_) : 7,
+ _payload_,
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations { size: Static(6), payload_size: Static(0), fields: vec![] },
+ Annotations {
+ size: Static(40),
+ payload_size: Dynamic,
+ fields: vec![
+ Static(14),
+ Static(6),
+ Static(3),
+ Static(4),
+ Static(6),
+ Static(7),
+ Dynamic
+ ]
+ },
+ ])
+ )
+ }
+
+ #[test]
+ fn test_typedef_annotations() {
+ // Struct with constant size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S {
+ a: 8[4],
+ }
+ packet A {
+ a: 16,
+ s: S,
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations { size: Static(32), payload_size: Static(0), fields: vec![Static(32)] },
+ Annotations {
+ size: Static(48),
+ payload_size: Static(0),
+ fields: vec![Static(16), Static(32)]
+ },
+ ])
+ );
+
+ // Struct with dynamic size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S {
+ _size_ (a) : 8,
+ a: 8[],
+ }
+ packet A {
+ a: 16,
+ s: S,
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(16), Dynamic]
+ },
+ ])
+ );
+
+ // Struct with unknown size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S {
+ a: 8[],
+ }
+ packet A {
+ a: 16,
+ s: S,
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations { size: Unknown, payload_size: Static(0), fields: vec![Unknown] },
+ Annotations {
+ size: Unknown,
+ payload_size: Static(0),
+ fields: vec![Static(16), Unknown]
+ },
+ ])
+ );
+ }
+
+ #[test]
+ fn test_array_annotations() {
+ // Array with constant size element and constant count.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ enum E : 8 { X=0, Y=1 }
+ packet A {
+ a: E[8],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations { size: Static(8), payload_size: Static(0), fields: vec![] },
+ Annotations { size: Static(64), payload_size: Static(0), fields: vec![Static(64)] },
+ ])
+ );
+
+ // Array with dynamic size element and constant count.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S { _size_(a): 8, a: 8[] }
+ packet A {
+ a: S[8],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ Annotations { size: Dynamic, payload_size: Static(0), fields: vec![Dynamic] },
+ ])
+ );
+
+ // Array with constant size element and dynamic size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S { a: 7, _reserved_: 1 }
+ packet A {
+ _size_ (a) : 8,
+ a: S[],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Static(8),
+ payload_size: Static(0),
+ fields: vec![Static(7), Static(1)]
+ },
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ ])
+ );
+
+ // Array with dynamic size element and dynamic size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S { _size_(a): 8, a: 8[] }
+ packet A {
+ _size_ (a) : 8,
+ a: S[],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ ])
+ );
+
+ // Array with constant size element and dynamic count.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S { a: 7, _reserved_: 1 }
+ packet A {
+ _count_ (a) : 8,
+ a: S[],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Static(8),
+ payload_size: Static(0),
+ fields: vec![Static(7), Static(1)]
+ },
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ ])
+ );
+
+ // Array with dynamic size element and dynamic count.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S { _size_(a): 8, a: 8[] }
+ packet A {
+ _count_ (a) : 8,
+ a: S[],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ ])
+ );
+
+ // Array with constant size element and unknown size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S { a: 7, _fixed_ = 1 : 1 }
+ packet A {
+ a: S[],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Static(8),
+ payload_size: Static(0),
+ fields: vec![Static(7), Static(1)]
+ },
+ Annotations { size: Unknown, payload_size: Static(0), fields: vec![Unknown] },
+ ])
+ );
+
+ // Array with dynamic size element and unknown size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S { _size_(a): 8, a: 8[] }
+ packet A {
+ a: S[],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(8), Dynamic]
+ },
+ Annotations { size: Unknown, payload_size: Static(0), fields: vec![Unknown] },
+ ])
+ );
+
+ // Array with padded size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ struct S {
+ _count_(a): 40,
+ a: 16[],
+ }
+ packet A {
+ a: S[],
+ _padding_ [128],
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Dynamic,
+ payload_size: Static(0),
+ fields: vec![Static(40), Dynamic]
+ },
+ Annotations {
+ size: Static(1024),
+ payload_size: Static(0),
+ fields: vec![Unknown, Static(0)]
+ },
+ ])
+ );
+ }
+
+ #[test]
+ fn test_payload_annotations() {
+ // Payload with dynamic size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ packet A {
+ _size_(_payload_) : 8,
+ _payload_
+ }
+ "#
+ ),
+ eq(vec![Annotations {
+ size: Static(8),
+ payload_size: Dynamic,
+ fields: vec![Static(8), Dynamic]
+ },])
+ );
+
+ // Payload with unknown size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ packet A {
+ a : 8,
+ _payload_
+ }
+ "#
+ ),
+ eq(vec![Annotations {
+ size: Static(8),
+ payload_size: Unknown,
+ fields: vec![Static(8), Unknown]
+ },])
+ );
+ }
+
+ #[test]
+ fn test_body_annotations() {
+ // Body with dynamic size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ packet A {
+ _size_(_body_) : 8,
+ _body_
+ }
+ "#
+ ),
+ eq(vec![Annotations {
+ size: Static(8),
+ payload_size: Dynamic,
+ fields: vec![Static(8), Dynamic]
+ },])
+ );
+
+ // Body with unknown size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ packet A {
+ a : 8,
+ _body_
+ }
+ "#
+ ),
+ eq(vec![Annotations {
+ size: Static(8),
+ payload_size: Unknown,
+ fields: vec![Static(8), Unknown]
+ },])
+ );
+ }
+
+ #[test]
+ fn test_decl_annotations() {
+ // Test parent with constant size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ packet A {
+ a: 2,
+ _reserved_: 6,
+ _payload_
+ }
+ packet B : A {
+ b: 8,
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Static(8),
+ payload_size: Unknown,
+ fields: vec![Static(2), Static(6), Unknown]
+ },
+ Annotations { size: Static(16), payload_size: Static(0), fields: vec![Static(8)] },
+ ])
+ );
+
+ // Test parent with dynamic size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ packet A {
+ _size_(a) : 8,
+ a: 8[],
+ _size_(_payload_) : 8,
+ _payload_
+ }
+ packet B : A {
+ b: 8,
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Dynamic,
+ payload_size: Dynamic,
+ fields: vec![Static(8), Dynamic, Static(8), Dynamic]
+ },
+ Annotations { size: Dynamic, payload_size: Static(0), fields: vec![Static(8)] },
+ ])
+ );
+
+ // Test parent with unknown size.
+ assert_that!(
+ annotations(
+ r#"
+ little_endian_packets
+ packet A {
+ _size_(_payload_) : 8,
+ a: 8[],
+ _payload_
+ }
+ packet B : A {
+ b: 8,
+ }
+ "#
+ ),
+ eq(vec![
+ Annotations {
+ size: Unknown,
+ payload_size: Dynamic,
+ fields: vec![Static(8), Unknown, Dynamic]
+ },
+ Annotations { size: Unknown, payload_size: Static(0), fields: vec![Static(8)] },
+ ])
+ );
+ }
+
fn desugar(text: &str) -> analyzer::ast::File {
let mut db = SourceDatabase::new();
let file =
diff --git a/src/backends/rust.rs b/src/backends/rust.rs
index 2761b9f..cda1a8f 100644
--- a/src/backends/rust.rs
+++ b/src/backends/rust.rs
@@ -92,7 +92,7 @@ fn generate_packet_size_getter<'a>(
let mut dynamic_widths = Vec::new();
for field in fields {
- if let Some(width) = scope.get_field_width(field, false) {
+ if let Some(width) = field.annot.static_() {
constant_width += width;
continue;
}
@@ -125,10 +125,10 @@ fn generate_packet_size_getter<'a>(
self.#id.iter().map(|elem| elem.get_size()).sum::<usize>()
}
}
- Some(analyzer_ast::Decl { desc: ast::DeclDesc::Enum { .. }, .. }) => {
- let width = syn::Index::from(
- scope.get_decl_width(decl.unwrap(), false).unwrap() / 8,
- );
+ Some(analyzer_ast::Decl {
+ desc: ast::DeclDesc::Enum { width, .. }, ..
+ }) => {
+ let width = syn::Index::from(width / 8);
let mul_width = (width.index > 1).then(|| quote!(* #width));
quote! {
self.#id.len() #mul_width
@@ -1012,6 +1012,7 @@ mod tests {
use crate::ast;
use crate::parser::parse_inline;
use crate::test_utils::{assert_snapshot_eq, format_rust};
+ use googletest::prelude::{elements_are, eq, expect_that};
use paste::paste;
/// Parse a string fragment as a PDL file.
@@ -1026,16 +1027,8 @@ mod tests {
analyzer::analyze(&file).expect("analyzer error")
}
- #[track_caller]
- fn assert_iter_eq<T: std::cmp::PartialEq + std::fmt::Debug>(
- left: impl IntoIterator<Item = T>,
- right: impl IntoIterator<Item = T>,
- ) {
- assert_eq!(left.into_iter().collect::<Vec<T>>(), right.into_iter().collect::<Vec<T>>());
- }
-
- #[test]
- fn test_find_constrained_parent_fields() {
+ #[googletest::test]
+ fn test_find_constrained_parent_fields() -> googletest::Result<()> {
let code = "
little_endian_packets
packet Parent {
@@ -1058,12 +1051,17 @@ mod tests {
";
let file = parse_str(code);
let scope = lint::Scope::new(&file);
- let find_fields =
- |id| find_constrained_parent_fields(&scope, id).map(|field| field.id().unwrap());
- assert_iter_eq(find_fields("Parent"), vec![]);
- assert_iter_eq(find_fields("Child"), vec!["b", "c"]);
- assert_iter_eq(find_fields("GrandChild"), vec!["c"]);
- assert_iter_eq(find_fields("GrandGrandChild"), vec![]);
+ let find_fields = |id| {
+ find_constrained_parent_fields(&scope, id)
+ .map(|field| field.id().unwrap())
+ .collect::<Vec<_>>()
+ };
+
+ expect_that!(find_fields("Parent"), elements_are![]);
+ expect_that!(find_fields("Child"), elements_are![eq("b"), eq("c")]);
+ expect_that!(find_fields("GrandChild"), elements_are![eq("c")]);
+ expect_that!(find_fields("GrandGrandChild"), elements_are![]);
+ Ok(())
}
/// Create a unit test for the given PDL `code`.
diff --git a/src/backends/rust/parser.rs b/src/backends/rust/parser.rs
index 196ae00..33dd840 100644
--- a/src/backends/rust/parser.rs
+++ b/src/backends/rust/parser.rs
@@ -83,7 +83,7 @@ impl<'a> FieldParser<'a> {
fn add_bit_field(&mut self, field: &'a analyzer_ast::Field) {
self.chunk.push(BitField { shift: self.shift, field });
- self.shift += self.scope.get_field_width(field, false).unwrap();
+ self.shift += field.annot.size.static_().unwrap();
if self.shift % 8 != 0 {
return;
}
@@ -124,7 +124,7 @@ impl<'a> FieldParser<'a> {
v = quote! { (#v >> #shift) }
}
- let width = self.scope.get_field_width(field, false).unwrap();
+ let width = field.annot.size.static_().unwrap();
let value_type = types::Integer::new(width);
if !single_value && width < value_type.width {
// Mask value if we grabbed more than `width` and if
@@ -243,7 +243,7 @@ impl<'a> FieldParser<'a> {
let mut offset = 0;
for field in fields {
- if let Some(width) = self.scope.get_field_width(field, false) {
+ if let Some(width) = field.annot.static_() {
offset += width;
} else {
return None;
@@ -284,7 +284,7 @@ impl<'a> FieldParser<'a> {
Static(usize), // Static size in bytes.
Unknown,
}
- let element_width = match width.or_else(|| self.scope.get_decl_width(decl?, false)) {
+ let element_width = match width.or_else(|| decl.unwrap().annot.total_size().static_()) {
Some(w) => {
assert_eq!(w % 8, 0, "Array element size ({w}) is not a multiple of 8");
ElementWidth::Static(w / 8)
@@ -471,11 +471,11 @@ impl<'a> FieldParser<'a> {
let id = format_ident!("{id}");
let type_id = format_ident!("{type_id}");
- self.code.push(match self.scope.get_decl_width(decl, true) {
- None => quote! {
+ self.code.push(match decl.annot.size {
+ analyzer_ast::Size::Unknown | analyzer_ast::Size::Dynamic => quote! {
let #id = #type_id::parse_inner(&mut #span)?;
},
- Some(width) => {
+ analyzer_ast::Size::Static(width) => {
assert_eq!(width % 8, 0, "Typedef field type size is not a multiple of 8");
match &decl.desc {
ast::DeclDesc::Checksum { .. } => todo!(),
diff --git a/src/backends/rust/serializer.rs b/src/backends/rust/serializer.rs
index 70a8653..497936c 100644
--- a/src/backends/rust/serializer.rs
+++ b/src/backends/rust/serializer.rs
@@ -74,7 +74,7 @@ impl<'a> FieldSerializer<'a> {
}
fn add_bit_field(&mut self, field: &analyzer_ast::Field) {
- let width = self.scope.get_field_width(field, false).unwrap();
+ let width = field.annot.size.static_().unwrap();
let shift = self.shift;
match &field.desc {
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..cd384bc
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,24 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! PDL parser and analyzer.
+
+pub mod analyzer;
+pub mod ast;
+pub mod backends;
+pub mod lint;
+pub mod parser;
+#[cfg(test)]
+pub mod test_utils;
+pub mod utils;
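The new src/lib.rs makes the parser and analyzer reusable as a library crate. A minimal sketch of driving it from another crate, mirroring the call sequence of src/main.rs below (errors collapsed to plain strings for brevity):

    use pdl_compiler::{analyzer, ast, parser};

    fn parse_and_analyze(path: String) -> Result<(), String> {
        let mut sources = ast::SourceDatabase::new();
        let file = parser::parse_file(&mut sources, path)
            .map_err(|_| String::from("parse error"))?;
        let _analyzed_file = analyzer::analyze(&file)
            .map_err(|_| String::from("analysis error"))?;
        Ok(())
    }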
diff --git a/src/lint.rs b/src/lint.rs
index 874596e..421f364 100644
--- a/src/lint.rs
+++ b/src/lint.rs
@@ -256,65 +256,4 @@ impl<'d> Scope<'d> {
_ => false,
}
}
-
- /// Determine the size of a field in bits, if possible.
- ///
- /// If the field is dynamically sized (e.g. unsized array or
- /// payload field), `None` is returned. If `skip_payload` is set,
- /// payload and body fields are counted as having size `0` rather
- /// than a variable size.
- pub fn get_field_width(
- &self,
- field: &analyzer_ast::Field,
- skip_payload: bool,
- ) -> Option<usize> {
- match &field.desc {
- FieldDesc::Scalar { width, .. }
- | FieldDesc::Size { width, .. }
- | FieldDesc::Count { width, .. }
- | FieldDesc::ElementSize { width, .. }
- | FieldDesc::Reserved { width, .. }
- | FieldDesc::FixedScalar { width, .. } => Some(*width),
- FieldDesc::Padding { .. } => Some(0),
- FieldDesc::Array { .. } if field.annot.padded_size.is_some() => {
- Some(field.annot.padded_size.unwrap() * 8)
- }
- FieldDesc::Array { size: Some(size), width, .. } => {
- let element_width = width
- .or_else(|| self.get_decl_width(self.get_field_declaration(field)?, false))?;
- Some(element_width * size)
- }
- FieldDesc::FixedEnum { .. } | FieldDesc::Typedef { .. } => {
- self.get_decl_width(self.get_field_declaration(field)?, false)
- }
- FieldDesc::Checksum { .. } => Some(0),
- FieldDesc::Payload { .. } | FieldDesc::Body { .. } if skip_payload => Some(0),
- _ => None,
- }
- }
-
- /// Determine the size of a declaration type in bits, if possible.
- ///
- /// If the type is dynamically sized (e.g. contains an array or
- /// payload), `None` is returned. If `skip_payload` is set,
- /// payload and body fields are counted as having size `0` rather
- /// than a variable size.
- pub fn get_decl_width(&self, decl: &analyzer_ast::Decl, skip_payload: bool) -> Option<usize> {
- match &decl.desc {
- DeclDesc::Enum { width, .. } | DeclDesc::Checksum { width, .. } => Some(*width),
- DeclDesc::CustomField { width, .. } => *width,
- DeclDesc::Packet { fields, parent_id, .. }
- | DeclDesc::Struct { fields, parent_id, .. } => {
- let mut packet_size = match parent_id {
- None => 0,
- Some(id) => self.get_decl_width(self.typedef.get(id.as_str())?, true)?,
- };
- for field in fields.iter() {
- packet_size += self.get_field_width(field, skip_payload)?;
- }
- Some(packet_size)
- }
- DeclDesc::Group { .. } | DeclDesc::Test { .. } => None,
- }
- }
}
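The removed Scope width helpers are superseded by the size annotations the analyzer now attaches to every field and declaration: the parser and serializer backends above read `field.annot.size` and `decl.annot` directly instead of recomputing widths on demand.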
diff --git a/src/main.rs b/src/main.rs
index 7049818..f550117 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -17,14 +17,7 @@
use argh::FromArgs;
use codespan_reporting::term::{self, termcolor};
-mod analyzer;
-mod ast;
-mod backends;
-mod lint;
-mod parser;
-#[cfg(test)]
-mod test_utils;
-mod utils;
+use pdl_compiler::{analyzer, ast, backends, parser};
#[allow(clippy::upper_case_acronyms)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
@@ -64,6 +57,27 @@ struct Opt {
#[argh(positional)]
/// input file.
input_file: String,
+
+ #[argh(option)]
+ /// exclude declarations from the generated output.
+ exclude_declaration: Vec<String>,
+}
+
+/// Remove declarations listed in the input filter.
+fn filter_declarations(
+ file: parser::ast::File,
+ exclude_declarations: &[String],
+) -> parser::ast::File {
+ ast::File {
+ declarations: file
+ .declarations
+ .into_iter()
+ .filter(|decl| {
+ decl.id().map(|id| !exclude_declarations.contains(&id.to_owned())).unwrap_or(true)
+ })
+ .collect(),
+ ..file
+ }
}
fn main() -> Result<(), String> {
@@ -77,6 +91,7 @@ fn main() -> Result<(), String> {
let mut sources = ast::SourceDatabase::new();
match parser::parse_file(&mut sources, opt.input_file) {
Ok(file) => {
+ let file = filter_declarations(file, &opt.exclude_declaration);
let analyzed_file = match analyzer::analyze(&file) {
Ok(file) => file,
Err(diagnostics) => {
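Since argh collects a Vec-typed option from repeated flags, several declarations can be excluded in a single invocation, e.g. `pdlc input.pdl --exclude-declaration Foo --exclude-declaration Bar` (illustrative names).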
diff --git a/src/test_utils.rs b/src/test_utils.rs
index 13a45fd..379752d 100644
--- a/src/test_utils.rs
+++ b/src/test_utils.rs
@@ -19,11 +19,9 @@
// rest of the `pdl` crate. To make this work, avoid `use crate::`
// statements below.
+use googletest::prelude::{assert_that, eq};
use std::fs;
-use std::io::Write;
use std::path::Path;
-use std::process::Command;
-use tempfile::NamedTempFile;
/// Format Rust code in `input`.
pub fn format_rust(input: &str) -> String {
@@ -32,58 +30,6 @@ pub fn format_rust(input: &str) -> String {
format!("#![rustfmt::skip]\n{formatted}")
}
-/// Find the unified diff between two strings using `diff`.
-///
-/// # Panics
-///
-/// Panics if `diff` cannot be found on `$PATH` or if it returns an
-/// error.
-pub fn diff(left_label: &str, left: &str, right_label: &str, right: &str) -> String {
- let mut temp_left = NamedTempFile::new().unwrap();
- temp_left.write_all(left.as_bytes()).unwrap();
- let mut temp_right = NamedTempFile::new().unwrap();
- temp_right.write_all(right.as_bytes()).unwrap();
-
- // We expect `diff` to be available on PATH.
- let output = Command::new("diff")
- .arg("--unified")
- .arg("--color=always")
- .arg("--label")
- .arg(left_label)
- .arg("--label")
- .arg(right_label)
- .arg(temp_left.path())
- .arg(temp_right.path())
- .output()
- .expect("failed to run diff");
- let diff_trouble_exit_code = 2; // from diff(1)
- assert_ne!(
- output.status.code().unwrap(),
- diff_trouble_exit_code,
- "diff failed: {}",
- output.status
- );
- String::from_utf8(output.stdout).expect("diff output was not UTF-8")
-}
-
-/// Compare two strings and output a diff if they are not equal.
-#[track_caller]
-pub fn assert_eq_with_diff(left_label: &str, left: &str, right_label: &str, right: &str) {
- assert!(
- left == right,
- "texts did not match, diff:\n{}\n",
- diff(left_label, left, right_label, right)
- );
-}
-
-/// Check that `haystack` contains `needle`.
-///
-/// Panic with a nice message if not.
-#[track_caller]
-pub fn assert_contains(haystack: &str, needle: &str) {
- assert!(haystack.contains(needle), "Could not find {:?} in {:?}", needle, haystack);
-}
-
/// Compare a string with a snapshot file.
///
/// The `snapshot_path` is relative to the current working directory
@@ -115,12 +61,7 @@ pub fn assert_snapshot_eq<P: AsRef<Path>>(snapshot_path: P, actual_content: &str
// Normal comparison if UPDATE_SNAPSHOTS is unset.
if !update_snapshots {
- return assert_eq_with_diff(
- snapshot.to_str().unwrap(),
- &snapshot_content,
- "actual",
- actual_content,
- );
+ assert_that!(actual_content, eq(&snapshot_content));
}
// Bail out if we are not using Cargo.
@@ -140,31 +81,3 @@ pub fn assert_snapshot_eq<P: AsRef<Path>>(snapshot_path: P, actual_content: &str
});
}
}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_diff_labels_with_special_chars() {
- // Check that special characters in labels are passed
- // correctly to diff.
- let patch = diff("left 'file'", "foo\nbar\n", "right ~file!", "foo\nnew line\nbar\n");
- assert_contains(&patch, "left 'file'");
- assert_contains(&patch, "right ~file!");
- }
-
- #[test]
- #[should_panic]
- fn test_assert_eq_with_diff_on_diff() {
- // We use identical labels to check that we haven't
- // accidentally mixed up the labels with the file content.
- assert_eq_with_diff("", "foo\nbar\n", "", "foo\nnew line\nbar\n");
- }
-
- #[test]
- fn test_assert_eq_with_diff_on_eq() {
- // No panic when there is no diff.
- assert_eq_with_diff("left", "foo\nbar\n", "right", "foo\nbar\n");
- }
-}
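assert_snapshot_eq keeps its update path: setting the UPDATE_SNAPSHOTS environment variable (e.g. `UPDATE_SNAPSHOTS=1 cargo test`) rewrites the snapshot files in place instead of comparing against them.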
diff --git a/tests/python_generator_test.py b/tests/python_generator_test.py
index dbd0c5b..fdd04a8 100644
--- a/tests/python_generator_test.py
+++ b/tests/python_generator_test.py
@@ -25,12 +25,12 @@ import typing_extensions
import unittest
from importlib import resources
-# (le|be)_pdl_test are the names of the modules generated from the canonical
+# (le|be)_backend are the names of the modules generated from the canonical
# little endian and big endian test grammars. The purpose of this module
# is to validate the generated parsers against the set of pre-generated
# test vectors in canonical/(le|be)_test_vectors.json.
-import le_pdl_test
-import be_pdl_test
+import le_backend
+import be_backend
def match_object(self, left, right):
@@ -94,7 +94,7 @@ class PacketParserTest(unittest.TestCase):
# Retrieve the class object from the generated
# module, in order to invoke the proper parse
# method for this test.
- cls = getattr(le_pdl_test, packet)
+ cls = getattr(le_backend, packet)
for test in tests:
result = cls.parse_all(bytes.fromhex(test['packed']))
match_object(self, result, test['unpacked'])
@@ -113,7 +113,7 @@ class PacketParserTest(unittest.TestCase):
# Retrieve the class object from the generated
# module, in order to invoke the proper parse
# method for this test.
- cls = getattr(be_pdl_test, packet)
+ cls = getattr(be_backend, packet)
for test in tests:
result = cls.parse_all(bytes.fromhex(test['packed']))
match_object(self, result, test['unpacked'])
@@ -138,7 +138,7 @@ class PacketSerializerTest(unittest.TestCase):
# module, in order to invoke the proper constructor
# method for this test.
for test in tests:
- cls = getattr(le_pdl_test, test.get('packet', packet))
+ cls = getattr(le_backend, test.get('packet', packet))
obj = create_object(cls, test['unpacked'])
result = obj.serialize()
self.assertEqual(result, bytes.fromhex(test['packed']))
@@ -158,7 +158,7 @@ class PacketSerializerTest(unittest.TestCase):
# module, in order to invoke the proper constructor
# method for this test.
for test in tests:
- cls = getattr(be_pdl_test, test.get('packet', packet))
+ cls = getattr(be_backend, test.get('packet', packet))
obj = create_object(cls, test['unpacked'])
result = obj.serialize()
self.assertEqual(result, bytes.fromhex(test['packed']))
@@ -168,28 +168,28 @@ class CustomPacketParserTest(unittest.TestCase):
"""Manual testing for custom fields."""
def testCustomField(self):
- result = le_pdl_test.Packet_Custom_Field_ConstantSize.parse_all([1])
+ result = le_backend.Packet_Custom_Field_ConstantSize.parse_all([1])
self.assertEqual(result.a.value, 1)
- result = le_pdl_test.Packet_Custom_Field_VariableSize.parse_all([1])
+ result = le_backend.Packet_Custom_Field_VariableSize.parse_all([1])
self.assertEqual(result.a.value, 1)
- result = le_pdl_test.Struct_Custom_Field_ConstantSize.parse_all([1])
+ result = le_backend.Struct_Custom_Field_ConstantSize.parse_all([1])
self.assertEqual(result.s.a.value, 1)
- result = le_pdl_test.Struct_Custom_Field_VariableSize.parse_all([1])
+ result = le_backend.Struct_Custom_Field_VariableSize.parse_all([1])
self.assertEqual(result.s.a.value, 1)
- result = be_pdl_test.Packet_Custom_Field_ConstantSize.parse_all([1])
+ result = be_backend.Packet_Custom_Field_ConstantSize.parse_all([1])
self.assertEqual(result.a.value, 1)
- result = be_pdl_test.Packet_Custom_Field_VariableSize.parse_all([1])
+ result = be_backend.Packet_Custom_Field_VariableSize.parse_all([1])
self.assertEqual(result.a.value, 1)
- result = be_pdl_test.Struct_Custom_Field_ConstantSize.parse_all([1])
+ result = be_backend.Struct_Custom_Field_ConstantSize.parse_all([1])
self.assertEqual(result.s.a.value, 1)
- result = be_pdl_test.Struct_Custom_Field_VariableSize.parse_all([1])
+ result = be_backend.Struct_Custom_Field_VariableSize.parse_all([1])
self.assertEqual(result.s.a.value, 1)
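The new module names match the --output file names produced by tests/run_python_generator_tests.sh below, so the test file imports whatever the generator scripts just wrote to out/.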
diff --git a/tests/run_cxx_generator_tests.sh b/tests/run_cxx_generator_tests.sh
new file mode 100755
index 0000000..d595747
--- /dev/null
+++ b/tests/run_cxx_generator_tests.sh
@@ -0,0 +1,46 @@
+#!/usr/bin/env bash
+
+set -euxo pipefail
+
+mkdir -p out/
+
+sed -e 's/little_endian_packets/big_endian_packets/' \
+ -e '/Start: little_endian_only/,/End: little_endian_only/d' \
+ < tests/canonical/le_test_file.pdl > out/be_test_file.pdl
+
+pdlc tests/canonical/le_test_file.pdl > out/le_test_file.json
+pdlc out/be_test_file.pdl > out/be_test_file.json
+
+python3 scripts/generate_cxx_backend.py \
+ --input out/le_test_file.json \
+ --output out/le_backend.h \
+ --namespace le_backend
+python3 scripts/generate_cxx_backend.py \
+ --input out/be_test_file.json \
+ --output out/be_backend.h \
+ --namespace be_backend
+
+python3 scripts/generate_cxx_backend_tests.py \
+ --input out/le_test_file.json \
+ --output out/le_backend_tests.cc \
+ --test-vectors tests/canonical/le_test_vectors.json \
+ --namespace le_backend \
+ --parser-test-suite le_backend_parser_test \
+ --serializer-test-suite le_backend_serializer_test \
+ --include-header le_backend.h
+python3 scripts/generate_cxx_backend_tests.py \
+ --input out/be_test_file.json \
+ --output out/be_backend_tests.cc \
+ --test-vectors tests/canonical/be_test_vectors.json \
+ --namespace be_backend \
+ --parser-test-suite be_backend_parser_test \
+ --serializer-test-suite be_backend_serializer_test \
+ --include-header be_backend.h
+
+g++ -Iscripts -Iout \
+ out/le_backend_tests.cc \
+ out/be_backend_tests.cc \
+ -lgtest -lgtest_main -o out/cxx_backend_tests
+
+./out/cxx_backend_tests \
+ --gtest_output="xml:out/cxx_backend_tests_detail.xml"
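The script assumes `pdlc` is on $PATH and that GoogleTest is installed where g++ can resolve -lgtest -lgtest_main; test results are written as XML to out/cxx_backend_tests_detail.xml.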
diff --git a/tests/run_python_generator_tests.sh b/tests/run_python_generator_tests.sh
new file mode 100755
index 0000000..64b39c6
--- /dev/null
+++ b/tests/run_python_generator_tests.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+set -euxo pipefail
+
+mkdir -p out/
+
+sed -e 's/little_endian_packets/big_endian_packets/' \
+ -e '/Start: little_endian_only/,/End: little_endian_only/d' \
+ < tests/canonical/le_test_file.pdl > out/be_test_file.pdl
+
+pdlc tests/canonical/le_test_file.pdl > out/le_test_file.json
+pdlc out/be_test_file.pdl > out/be_test_file.json
+
+python3 scripts/generate_python_backend.py \
+ --input out/le_test_file.json \
+ --output out/le_backend.py \
+ --custom-type-location tests.custom_types
+python3 scripts/generate_python_backend.py \
+ --input out/be_test_file.json \
+ --output out/be_backend.py \
+ --custom-type-location tests.custom_types
+
+export PYTHONPATH="./out:.:${PYTHONPATH:-}"
+python3 tests/python_generator_test.py
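The PYTHONPATH export puts the freshly generated le_backend.py and be_backend.py modules ahead of the repository root, which is what lets tests/python_generator_test.py resolve the new module names introduced above.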