about summary refs log tree commit diff
path: root/src/analyzer.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/analyzer.rs')
-rw-r--r--src/analyzer.rs390
1 file changed, 381 insertions, 9 deletions
diff --git a/src/analyzer.rs b/src/analyzer.rs
index d1cf84e..b80a6b6 100644
--- a/src/analyzer.rs
+++ b/src/analyzer.rs
@@ -190,6 +190,12 @@ pub enum ErrorCode {
E42 = 42,
E43 = 43,
DuplicateDefaultTag = 44,
+ InvalidOptionalField = 45,
+ UndeclaredConditionIdentifier = 46,
+ InvalidConditionIdentifier = 47,
+ InvalidConditionValue = 48,
+ E49 = 49,
+ ReusedConditionIdentifier = 50,
}
impl From<ErrorCode> for String {
@@ -351,6 +357,7 @@ impl<'d, A: Annotation + Default> Scope<'d, A> {
| FieldDesc::FixedScalar { .. }
| FieldDesc::Reserved { .. }
| FieldDesc::Group { .. }
+ | FieldDesc::Flag { .. }
| FieldDesc::Scalar { .. }
| FieldDesc::Array { type_id: None, .. } => None,
FieldDesc::FixedEnum { enum_id: type_id, .. }
@@ -368,6 +375,7 @@ impl<'d, A: Annotation + Default> Scope<'d, A> {
| FieldDesc::FixedScalar { .. }
| FieldDesc::FixedEnum { .. }
| FieldDesc::Reserved { .. }
+ | FieldDesc::Flag { .. }
| FieldDesc::Scalar { .. } => true,
FieldDesc::Typedef { type_id, .. } => {
let field = self.typedef.get(type_id.as_str());
@@ -1405,6 +1413,120 @@ fn check_checksum_fields(
Ok(())
}
+/// Check optional fields.
+/// Raises error diagnostics for the following cases:
+/// - invalid optional field
+/// - undeclared or invalid condition identifier
+/// - invalid condition value (must be the scalar 0 or 1)
+/// - reused condition identifier
+fn check_optional_fields(file: &parser_ast::File) -> Result<(), Diagnostics> {
+ let mut diagnostics: Diagnostics = Default::default();
+ for decl in &file.declarations {
+ let mut local_scope: HashMap<String, &parser_ast::Field> = HashMap::new();
+ let mut condition_ids: HashMap<String, &parser_ast::Field> = HashMap::new();
+ for field in decl.fields() {
+ if let Some(ref cond) = field.cond {
+ match &field.desc {
+ FieldDesc::Scalar { .. } | FieldDesc::Typedef { .. } => (),
+ _ => diagnostics.push(
+ Diagnostic::error()
+ .with_code(ErrorCode::InvalidOptionalField)
+ .with_message("invalid optional field".to_owned())
+ .with_labels(vec![field.loc.primary()])
+ .with_notes(vec!["note: expected scalar, or typedef field".to_owned()]),
+ ),
+ }
+ match local_scope.get(&cond.id) {
+ None => diagnostics.push(
+ Diagnostic::error()
+ .with_code(ErrorCode::UndeclaredConditionIdentifier)
+ .with_message("undeclared condition identifier".to_owned())
+ .with_labels(vec![field.loc.primary()])
+ .with_notes(vec!["note: expected scalar field identifier".to_owned()]),
+ ),
+ Some(Field { cond: Some(_), loc, .. }) => diagnostics.push(
+ Diagnostic::error()
+ .with_code(ErrorCode::E49)
+ .with_message("invalid condition identifier".to_owned())
+ .with_labels(vec![
+ field.loc.primary(),
+ loc.secondary().with_message(format!(
+ "`{}` is declared optional here",
+ cond.id
+ )),
+ ])
+ .with_notes(vec!["note: expected scalar field identifier".to_owned()]),
+ ),
+ Some(Field { desc: FieldDesc::Scalar { width: 1, .. }, .. }) => (),
+ Some(Field { desc: FieldDesc::Scalar { width, .. }, loc, .. }) => diagnostics
+ .push(
+ Diagnostic::error()
+ .with_code(ErrorCode::InvalidConditionIdentifier)
+ .with_message("invalid condition identifier".to_owned())
+ .with_labels(vec![
+ field.loc.primary(),
+ loc.secondary().with_message(format!(
+ "`{}` is declared with width `{}` here",
+ cond.id, width
+ )),
+ ])
+ .with_notes(vec![
+ "note: expected scalar field identifier".to_owned()
+ ]),
+ ),
+ Some(Field { loc, .. }) => diagnostics.push(
+ Diagnostic::error()
+ .with_code(ErrorCode::InvalidConditionIdentifier)
+ .with_message("invalid condition identifier".to_owned())
+ .with_labels(vec![
+ field.loc.primary(),
+ loc.secondary()
+ .with_message(format!("`{}` is declared here", cond.id)),
+ ])
+ .with_notes(vec!["note: expected scalar field identifier".to_owned()]),
+ ),
+ }
+ match (&cond.value, &cond.tag_id) {
+ (_, Some(_)) => diagnostics.push(
+ Diagnostic::error()
+ .with_code(ErrorCode::InvalidConditionValue)
+ .with_message("invalid condition value".to_owned())
+ .with_labels(vec![field.loc.primary()])
+ .with_notes(vec!["note: expected 0 or 1".to_owned()]),
+ ),
+ (Some(0), _) | (Some(1), _) => (),
+ (Some(_), _) => diagnostics.push(
+ Diagnostic::error()
+ .with_code(ErrorCode::InvalidConditionValue)
+ .with_message("invalid condition value".to_owned())
+ .with_labels(vec![field.loc.primary()])
+ .with_notes(vec!["note: expected 0 or 1".to_owned()]),
+ ),
+ _ => unreachable!(),
+ }
+ if let Some(prev_field) = condition_ids.insert(cond.id.to_owned(), field) {
+ diagnostics.push(
+ Diagnostic::error()
+ .with_code(ErrorCode::ReusedConditionIdentifier)
+ .with_message("reused condition identifier".to_owned())
+ .with_labels(vec![
+ field.loc.primary(),
+ prev_field
+ .loc
+ .secondary()
+ .with_message(format!("`{}` is first used here", cond.id)),
+ ]),
+ )
+ }
+ }
+ if let Some(id) = field.id() {
+ local_scope.insert(id.to_owned(), field);
+ }
+ }
+ }
+ diagnostics.err_or(())
+}
+
/// Check correct definition of packet sizes.
/// Annotate fields and declarations with the size in bits.
fn compute_field_sizes(file: &parser_ast::File) -> ast::File {
@@ -1478,6 +1600,7 @@ fn compute_field_sizes(file: &parser_ast::File) -> ast::File {
scope: &HashMap<String, ast::DeclAnnotation>,
) -> ast::Field {
field.annotate(match &field.desc {
+ _ if field.cond.is_some() => ast::FieldAnnotation::new(ast::Size::Dynamic),
FieldDesc::Checksum { .. } | FieldDesc::Padding { .. } => {
ast::FieldAnnotation::new(ast::Size::Static(0))
}
@@ -1489,6 +1612,7 @@ fn compute_field_sizes(file: &parser_ast::File) -> ast::File {
| FieldDesc::Scalar { width, .. } => {
ast::FieldAnnotation::new(ast::Size::Static(*width))
}
+ FieldDesc::Flag { .. } => ast::FieldAnnotation::new(ast::Size::Static(1)),
FieldDesc::Body | FieldDesc::Payload { .. } => {
let has_payload_size = decl.fields().any(|field| match &field.desc {
FieldDesc::Size { field_id, .. } => {
@@ -1583,6 +1707,7 @@ fn inline_groups(file: &parser_ast::File) -> Result<parser_ast::File, Diagnostic
},
loc: field.loc,
annot: field.annot,
+ cond: field.cond.clone(),
}]
}
FieldDesc::Typedef { id, type_id, .. } if constraints.contains_key(id) => {
@@ -1596,6 +1721,7 @@ fn inline_groups(file: &parser_ast::File) -> Result<parser_ast::File, Diagnostic
},
loc: field.loc,
annot: field.annot,
+ cond: field.cond.clone(),
}]
}
_ => vec![field.clone()],
@@ -1649,6 +1775,42 @@ fn inline_groups(file: &parser_ast::File) -> Result<parser_ast::File, Diagnostic
})
}
+/// Replace Scalar fields used as condition for optional fields by the more
+/// specific Flag construct.
+fn desugar_flags(file: &mut parser_ast::File) {
+ for decl in &mut file.declarations {
+ match &mut decl.desc {
+ DeclDesc::Packet { fields, .. }
+ | DeclDesc::Struct { fields, .. }
+ | DeclDesc::Group { fields, .. } => {
+ // Gather information about condition flags.
+ let mut condition_ids: HashMap<String, (String, usize)> = HashMap::new();
+ for field in fields.iter() {
+ if let Some(ref cond) = field.cond {
+ condition_ids.insert(
+ cond.id.to_owned(),
+ (field.id().unwrap().to_owned(), cond.value.unwrap()),
+ );
+ }
+ }
+ // Replace condition flags in the fields.
+ for field in fields.iter_mut() {
+ if let Some((optional_field_id, set_value)) =
+ field.id().and_then(|id| condition_ids.get(id))
+ {
+ field.desc = FieldDesc::Flag {
+ id: field.id().unwrap().to_owned(),
+ optional_field_id: optional_field_id.to_owned(),
+ set_value: *set_value,
+ }
+ }
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
/// Analyzer entry point, produces a new AST with annotations resulting
/// from the analysis.
pub fn analyze(file: &parser_ast::File) -> Result<ast::File, Diagnostics> {
@@ -1662,8 +1824,10 @@ pub fn analyze(file: &parser_ast::File) -> Result<ast::File, Diagnostics> {
check_array_fields(file)?;
check_padding_fields(file)?;
check_checksum_fields(file, &scope)?;
+ check_optional_fields(file)?;
check_group_constraints(file, &scope)?;
- let file = inline_groups(file)?;
+ let mut file = inline_groups(file)?;
+ desugar_flags(&mut file);
let scope = Scope::new(&file)?;
check_decl_constraints(&file, &scope)?;
Ok(compute_field_sizes(&file))
@@ -1681,8 +1845,7 @@ mod test {
macro_rules! raises {
($code:ident, $text:literal) => {{
let mut db = SourceDatabase::new();
- let file = parse_inline(&mut db, "stdin".to_owned(), $text.to_owned())
- .expect("parsing failure");
+ let file = parse_inline(&mut db, "stdin", $text.to_owned()).expect("parsing failure");
let result = analyzer::analyze(&file);
assert!(matches!(result, Err(_)));
let diagnostics = result.err().unwrap();
@@ -1697,8 +1860,7 @@ mod test {
macro_rules! valid {
($text:literal) => {{
let mut db = SourceDatabase::new();
- let file = parse_inline(&mut db, "stdin".to_owned(), $text.to_owned())
- .expect("parsing failure");
+ let file = parse_inline(&mut db, "stdin", $text.to_owned()).expect("parsing failure");
assert!(analyzer::analyze(&file).is_ok());
}};
}
@@ -2691,6 +2853,218 @@ mod test {
}
#[test]
+ fn test_e45() {
+ valid!(
+ r#"
+ little_endian_packets
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ x : 8 if c = 1,
+ }
+ "#
+ );
+
+ valid!(
+ r#"
+ little_endian_packets
+ enum A : 8 { X = 0 }
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ x : A if c = 0,
+ }
+ "#
+ );
+
+ raises!(
+ InvalidOptionalField,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ x : 8[] if c = 1,
+ }
+ "#
+ );
+
+ raises!(
+ InvalidOptionalField,
+ r#"
+ little_endian_packets
+ packet A {
+ c : 1,
+ _reserved_ : 7,
+ _size_(x) : 8 if c = 1,
+ x : 8[],
+ }
+ "#
+ );
+
+ raises!(
+ InvalidOptionalField,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ x : 8[],
+ _padding_ [10] if c = 1,
+ }
+ "#
+ );
+
+ raises!(
+ InvalidOptionalField,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ _reserved_ : 8 if c = 1,
+ }
+ "#
+ );
+
+ raises!(
+ InvalidOptionalField,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ _fixed_ = 0x42 : 8 if c = 1,
+ }
+ "#
+ );
+
+ raises!(
+ InvalidOptionalField,
+ r#"
+ little_endian_packets
+ enum A : 8 { X = 0 }
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ _fixed_ = X : A if c = 1,
+ }
+ "#
+ );
+ }
+
+ #[test]
+ fn test_e46() {
+ raises!(
+ UndeclaredConditionIdentifier,
+ r#"
+ little_endian_packets
+ packet B {
+ x : 8 if c = 1,
+ _reserved_ : 7,
+ }
+ "#
+ );
+ }
+
+ #[test]
+ fn test_e47() {
+ raises!(
+ InvalidConditionIdentifier,
+ r#"
+ little_endian_packets
+ enum A : 8 { X = 0 }
+ packet B {
+ c : A,
+ x : 8 if c = 1,
+ }
+ "#
+ );
+
+ raises!(
+ InvalidConditionIdentifier,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 8[],
+ x : 8 if c = 1,
+ }
+ "#
+ );
+
+ raises!(
+ InvalidConditionIdentifier,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 8,
+ x : 8 if c = 1,
+ }
+ "#
+ );
+ }
+
+ #[test]
+ fn test_e48() {
+ raises!(
+ InvalidConditionValue,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ x : 8 if c = A,
+ }
+ "#
+ );
+
+ raises!(
+ InvalidConditionValue,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ x : 8 if c = 2,
+ }
+ "#
+ );
+ }
+
+ #[test]
+ fn test_e49() {
+ raises!(
+ E49,
+ r#"
+ little_endian_packets
+ packet B {
+ c0 : 1,
+ _reserved_ : 7,
+ c1 : 1 if c0 = 1,
+ _reserved_ : 7,
+ x : 8 if c1 = 1,
+ }
+ "#
+ );
+ }
+
+ #[test]
+ fn test_e50() {
+ raises!(
+ ReusedConditionIdentifier,
+ r#"
+ little_endian_packets
+ packet B {
+ c : 1,
+ _reserved_ : 7,
+ x : 8 if c = 1,
+ y : 8 if c = 0,
+ }
+ "#
+ );
+ }
+
+ #[test]
fn test_enum_declaration() {
valid!(
r#"
@@ -2750,8 +3124,7 @@ mod test {
fn annotations(text: &str) -> Vec<Annotations> {
let mut db = SourceDatabase::new();
- let file =
- parse_inline(&mut db, "stdin".to_owned(), text.to_owned()).expect("parsing failure");
+ let file = parse_inline(&mut db, "stdin", text.to_owned()).expect("parsing failure");
let file = analyzer::analyze(&file).expect("analyzer failure");
file.declarations
.iter()
@@ -3255,8 +3628,7 @@ mod test {
fn desugar(text: &str) -> analyzer::ast::File {
let mut db = SourceDatabase::new();
- let file =
- parse_inline(&mut db, "stdin".to_owned(), text.to_owned()).expect("parsing failure");
+ let file = parse_inline(&mut db, "stdin", text.to_owned()).expect("parsing failure");
analyzer::analyze(&file).expect("analyzer failure")
}