diff options
author | Henri Chataing <henrichataing@google.com> | 2023-08-12 00:08:10 +0000 |
---|---|---|
committer | Henri Chataing <henrichataing@google.com> | 2023-08-12 00:08:10 +0000 |
commit | b32d1823b491678db6f89438450945e8c973b5e2 (patch) | |
tree | 897836674c74faac90707276de61c91a3c2e8c6b | |
parent | 6e418cc0c7efdd32cb92b38b3bbcae061436be75 (diff) | |
download | pdl-compiler-b32d1823b491678db6f89438450945e8c973b5e2.tar.gz |
Upgrade pdl-compiler to 0.1.6 (branch: emu-33-dev)
This project was upgraded with external_updater.
Usage: tools/external_updater/updater.sh update rust/crates/pdl-compiler
For more info, check https://cs.android.com/android/platform/superproject/+/main:tools/external_updater/README.md
Test: TreeHugger
Change-Id: I96ddca3eebd1c7ca647fa750f6329c83d05ab9ed
-rw-r--r-- | .cargo_vcs_info.json | 2 | ||||
-rw-r--r-- | Android.bp | 6 | ||||
-rw-r--r-- | Cargo.toml | 2 | ||||
-rw-r--r-- | Cargo.toml.orig | 2 | ||||
-rw-r--r-- | METADATA | 8 | ||||
-rw-r--r-- | src/analyzer.rs | 425 | ||||
-rw-r--r-- | src/ast.rs | 8 | ||||
-rw-r--r-- | src/lib.rs | 1 | ||||
-rw-r--r-- | src/parser.rs | 2 | ||||
-rw-r--r-- | src/utils.rs | 67 |
10 files changed, 254 insertions, 269 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json index ee1b6ed..fb68fdd 100644 --- a/.cargo_vcs_info.json +++ b/.cargo_vcs_info.json @@ -1,6 +1,6 @@ { "git": { - "sha1": "63488f0151c18cc5d2a1e22b1be668464605f43a" + "sha1": "79fcfdf26cf927144ddde1ecf83d0716441c5e45" }, "path_in_vcs": "" }
\ No newline at end of file @@ -7,7 +7,7 @@ rust_binary_host { name: "generate_canonical_tests", crate_name: "generate_canonical_tests", cargo_env_compat: true, - cargo_pkg_version: "0.1.5", + cargo_pkg_version: "0.1.6", srcs: ["src/bin/generate-canonical-tests.rs"], edition: "2021", features: [ @@ -37,7 +37,7 @@ rust_library_host { name: "libpdl_compiler", crate_name: "pdl_compiler", cargo_env_compat: true, - cargo_pkg_version: "0.1.5", + cargo_pkg_version: "0.1.6", srcs: ["src/lib.rs"], edition: "2021", features: [ @@ -66,7 +66,7 @@ rust_binary_host { name: "pdlc", crate_name: "pdlc", cargo_env_compat: true, - cargo_pkg_version: "0.1.5", + cargo_pkg_version: "0.1.6", srcs: ["src/main.rs"], edition: "2021", features: [ @@ -12,7 +12,7 @@ [package] edition = "2021" name = "pdl-compiler" -version = "0.1.5" +version = "0.1.6" authors = [ "Henri Chataing <henrichataing@google.com>", "David de Jesus Duarte <licorne@google.com>", diff --git a/Cargo.toml.orig b/Cargo.toml.orig index a78d3f5..f1e9ae0 100644 --- a/Cargo.toml.orig +++ b/Cargo.toml.orig @@ -1,6 +1,6 @@ [package] name = "pdl-compiler" -version = "0.1.5" +version = "0.1.6" edition = "2021" description = "Parser and serializer generator for protocol binary packets" repository = "https://github.com/google/pdl/" @@ -11,13 +11,13 @@ third_party { } url { type: ARCHIVE - value: "https://static.crates.io/crates/pdl-compiler/pdl-compiler-0.1.5.crate" + value: "https://static.crates.io/crates/pdl-compiler/pdl-compiler-0.1.6.crate" } - version: "0.1.5" + version: "0.1.6" license_type: NOTICE last_upgrade_date { year: 2023 - month: 7 - day: 26 + month: 8 + day: 12 } } diff --git a/src/analyzer.rs b/src/analyzer.rs index c89d3d0..d1cf84e 100644 --- a/src/analyzer.rs +++ b/src/analyzer.rs @@ -20,7 +20,6 @@ use std::collections::HashMap; use crate::ast::*; use crate::parser::ast as parser_ast; -use crate::utils; pub mod ast { use serde::Serialize; @@ -845,184 +844,176 @@ fn check_enum_declarations(file: &parser_ast::File) 
-> Result<(), Diagnostics> { diagnostics.err_or(()) } -/// Check constraints. -/// Raises error diagnostics for the following cases: -/// - undeclared constraint identifier -/// - invalid constraint identifier -/// - invalid constraint scalar value (bad type) -/// - invalid constraint scalar value (overflow) -/// - invalid constraint enum value (bad type) -/// - invalid constraint enum value (undeclared tag) -/// - duplicate constraint -fn check_constraints( - file: &parser_ast::File, +/// Helper function for validating one constraint. +fn check_constraint( + constraint: &Constraint, + decl: &parser_ast::Decl, scope: &Scope<parser_ast::Annotation>, -) -> Result<(), Diagnostics> { - fn check_constraint( - constraint: &Constraint, - decl: &parser_ast::Decl, - scope: &Scope<parser_ast::Annotation>, - diagnostics: &mut Diagnostics, - ) { - match scope.iter_fields(decl).find(|field| field.id() == Some(&constraint.id)) { - None => diagnostics.push( - Diagnostic::error() - .with_code(ErrorCode::UndeclaredConstraintIdentifier) - .with_message(format!("undeclared constraint identifier `{}`", constraint.id)) - .with_labels(vec![constraint.loc.primary()]) - .with_notes(vec!["hint: expected scalar or typedef identifier".to_owned()]), - ), - Some(field @ Field { desc: FieldDesc::Array { .. }, .. }) => diagnostics.push( - Diagnostic::error() - .with_code(ErrorCode::InvalidConstraintIdentifier) - .with_message(format!("invalid constraint identifier `{}`", constraint.id)) - .with_labels(vec![ - constraint.loc.primary(), - field.loc.secondary().with_message(format!( - "`{}` is declared here as array field", - constraint.id - )), - ]) - .with_notes(vec!["hint: expected scalar or typedef identifier".to_owned()]), - ), - Some(field @ Field { desc: FieldDesc::Scalar { width, .. }, .. 
}) => { - match constraint.value { + diagnostics: &mut Diagnostics, +) { + match scope.iter_fields(decl).find(|field| field.id() == Some(&constraint.id)) { + None => diagnostics.push( + Diagnostic::error() + .with_code(ErrorCode::UndeclaredConstraintIdentifier) + .with_message(format!("undeclared constraint identifier `{}`", constraint.id)) + .with_labels(vec![constraint.loc.primary()]) + .with_notes(vec!["hint: expected scalar or typedef identifier".to_owned()]), + ), + Some(field @ Field { desc: FieldDesc::Array { .. }, .. }) => diagnostics.push( + Diagnostic::error() + .with_code(ErrorCode::InvalidConstraintIdentifier) + .with_message(format!("invalid constraint identifier `{}`", constraint.id)) + .with_labels(vec![ + constraint.loc.primary(), + field.loc.secondary().with_message(format!( + "`{}` is declared here as array field", + constraint.id + )), + ]) + .with_notes(vec!["hint: expected scalar or typedef identifier".to_owned()]), + ), + Some(field @ Field { desc: FieldDesc::Scalar { width, .. }, .. }) => { + match constraint.value { + None => diagnostics.push( + Diagnostic::error() + .with_code(ErrorCode::E17) + .with_message(format!( + "invalid constraint value `{}`", + constraint.tag_id.as_ref().unwrap() + )) + .with_labels(vec![ + constraint.loc.primary(), + field.loc.secondary().with_message(format!( + "`{}` is declared here as scalar field", + constraint.id + )), + ]) + .with_notes(vec!["hint: expected scalar value".to_owned()]), + ), + Some(value) if bit_width(value) > *width => diagnostics.push( + Diagnostic::error() + .with_code(ErrorCode::ConstraintValueOutOfRange) + .with_message(format!( + "constraint value `{}` is larger than maximum value", + value + )) + .with_labels(vec![constraint.loc.primary(), field.loc.secondary()]), + ), + _ => (), + } + } + Some(field @ Field { desc: FieldDesc::Typedef { type_id, .. }, .. }) => { + match scope.typedef.get(type_id) { + None => (), + Some(Decl { desc: DeclDesc::Enum { tags, .. }, .. 
}) => match &constraint.tag_id { None => diagnostics.push( Diagnostic::error() - .with_code(ErrorCode::E17) + .with_code(ErrorCode::E19) .with_message(format!( "invalid constraint value `{}`", - constraint.tag_id.as_ref().unwrap() - )) - .with_labels(vec![ - constraint.loc.primary(), - field.loc.secondary().with_message(format!( - "`{}` is declared here as scalar field", - constraint.id - )), - ]) - .with_notes(vec!["hint: expected scalar value".to_owned()]), - ), - Some(value) if bit_width(value) > *width => diagnostics.push( - Diagnostic::error() - .with_code(ErrorCode::ConstraintValueOutOfRange) - .with_message(format!( - "constraint value `{}` is larger than maximum value", - value - )) - .with_labels(vec![constraint.loc.primary(), field.loc.secondary()]), - ), - _ => (), - } - } - Some(field @ Field { desc: FieldDesc::Typedef { type_id, .. }, .. }) => { - match scope.typedef.get(type_id) { - None => (), - Some(Decl { desc: DeclDesc::Enum { tags, .. }, .. }) => { - match &constraint.tag_id { - None => diagnostics.push( - Diagnostic::error() - .with_code(ErrorCode::E19) - .with_message(format!( - "invalid constraint value `{}`", - constraint.value.unwrap() - )) - .with_labels(vec![ - constraint.loc.primary(), - field.loc.secondary().with_message(format!( - "`{}` is declared here as typedef field", - constraint.id - )), - ]) - .with_notes(vec!["hint: expected enum value".to_owned()]), - ), - Some(tag_id) => match tags.iter().find(|tag| tag.id() == tag_id) { - None => diagnostics.push( - Diagnostic::error() - .with_code(ErrorCode::E20) - .with_message(format!("undeclared enum tag `{}`", tag_id)) - .with_labels(vec![ - constraint.loc.primary(), - field.loc.secondary().with_message(format!( - "`{}` is declared here", - constraint.id - )), - ]), - ), - Some(Tag::Range { .. 
}) => diagnostics.push( - Diagnostic::error() - .with_code(ErrorCode::E42) - .with_message(format!( - "enum tag `{}` defines a range", - tag_id - )) - .with_labels(vec![ - constraint.loc.primary(), - field.loc.secondary().with_message(format!( - "`{}` is declared here", - constraint.id - )), - ]) - .with_notes(vec![ - "hint: expected enum tag with value".to_owned() - ]), - ), - Some(_) => (), - }, - } - } - Some(decl) => diagnostics.push( - Diagnostic::error() - .with_code(ErrorCode::E21) - .with_message(format!( - "invalid constraint identifier `{}`", constraint.value.unwrap() )) .with_labels(vec![ constraint.loc.primary(), field.loc.secondary().with_message(format!( - "`{}` is declared here as {} typedef field", - constraint.id, - decl.kind() + "`{}` is declared here as typedef field", + constraint.id )), ]) .with_notes(vec!["hint: expected enum value".to_owned()]), ), - } - } - Some(_) => unreachable!(), - } - } - - fn check_constraints<'d>( - constraints: &'d [Constraint], - parent_decl: &parser_ast::Decl, - scope: &Scope<parser_ast::Annotation>, - mut constraints_by_id: HashMap<String, &'d Constraint>, - diagnostics: &mut Diagnostics, - ) { - for constraint in constraints { - check_constraint(constraint, parent_decl, scope, diagnostics); - if let Some(prev) = constraints_by_id.insert(constraint.id.to_string(), constraint) { - // Constraint appears twice in current set. - diagnostics.push( + Some(tag_id) => match tags.iter().find(|tag| tag.id() == tag_id) { + None => diagnostics.push( + Diagnostic::error() + .with_code(ErrorCode::E20) + .with_message(format!("undeclared enum tag `{}`", tag_id)) + .with_labels(vec![ + constraint.loc.primary(), + field.loc.secondary().with_message(format!( + "`{}` is declared here", + constraint.id + )), + ]), + ), + Some(Tag::Range { .. 
}) => diagnostics.push( + Diagnostic::error() + .with_code(ErrorCode::E42) + .with_message(format!("enum tag `{}` defines a range", tag_id)) + .with_labels(vec![ + constraint.loc.primary(), + field.loc.secondary().with_message(format!( + "`{}` is declared here", + constraint.id + )), + ]) + .with_notes(vec!["hint: expected enum tag with value".to_owned()]), + ), + Some(_) => (), + }, + }, + Some(decl) => diagnostics.push( Diagnostic::error() - .with_code(ErrorCode::DuplicateConstraintIdentifier) + .with_code(ErrorCode::E21) .with_message(format!( - "duplicate constraint identifier `{}`", - constraint.id + "invalid constraint identifier `{}`", + constraint.value.unwrap() )) .with_labels(vec![ constraint.loc.primary(), - prev.loc - .secondary() - .with_message(format!("`{}` is first constrained here", prev.id)), - ]), - ) + field.loc.secondary().with_message(format!( + "`{}` is declared here as {} typedef field", + constraint.id, + decl.kind() + )), + ]) + .with_notes(vec!["hint: expected enum value".to_owned()]), + ), } } + Some(_) => unreachable!(), } +} +/// Helper function for validating a list of constraints. +fn check_constraints_list<'d>( + constraints: &'d [Constraint], + parent_decl: &parser_ast::Decl, + scope: &Scope<parser_ast::Annotation>, + mut constraints_by_id: HashMap<String, &'d Constraint>, + diagnostics: &mut Diagnostics, +) { + for constraint in constraints { + check_constraint(constraint, parent_decl, scope, diagnostics); + if let Some(prev) = constraints_by_id.insert(constraint.id.to_string(), constraint) { + // Constraint appears twice in current set. + diagnostics.push( + Diagnostic::error() + .with_code(ErrorCode::DuplicateConstraintIdentifier) + .with_message(format!("duplicate constraint identifier `{}`", constraint.id)) + .with_labels(vec![ + constraint.loc.primary(), + prev.loc + .secondary() + .with_message(format!("`{}` is first constrained here", prev.id)), + ]), + ) + } + } +} + +/// Check constraints. 
+/// Raises error diagnostics for the following cases: +/// - undeclared constraint identifier +/// - invalid constraint identifier +/// - invalid constraint scalar value (bad type) +/// - invalid constraint scalar value (overflow) +/// - invalid constraint enum value (bad type) +/// - invalid constraint enum value (undeclared tag) +/// - duplicate constraint +fn check_decl_constraints( + file: &parser_ast::File, + scope: &Scope<parser_ast::Annotation>, +) -> Result<(), Diagnostics> { let mut diagnostics: Diagnostics = Default::default(); for decl in &file.declarations { // Check constraints for packet inheritance. @@ -1030,7 +1021,7 @@ fn check_constraints( DeclDesc::Packet { constraints, parent_id: Some(parent_id), .. } | DeclDesc::Struct { constraints, parent_id: Some(parent_id), .. } => { let parent_decl = scope.typedef.get(parent_id).unwrap(); - check_constraints( + check_constraints_list( constraints, parent_decl, scope, @@ -1047,12 +1038,37 @@ fn check_constraints( } _ => (), } + } + diagnostics.err_or(()) +} + +/// Check constraints. +/// Raises error diagnostics for the following cases: +/// - undeclared constraint identifier +/// - invalid constraint identifier +/// - invalid constraint scalar value (bad type) +/// - invalid constraint scalar value (overflow) +/// - invalid constraint enum value (bad type) +/// - invalid constraint enum value (undeclared tag) +/// - duplicate constraint +fn check_group_constraints( + file: &parser_ast::File, + scope: &Scope<parser_ast::Annotation>, +) -> Result<(), Diagnostics> { + let mut diagnostics: Diagnostics = Default::default(); + for decl in &file.declarations { // Check constraints for group inlining. 
for field in decl.fields() { if let FieldDesc::Group { group_id, constraints } = &field.desc { let group_decl = scope.typedef.get(group_id).unwrap(); - check_constraints(constraints, group_decl, scope, HashMap::new(), &mut diagnostics) + check_constraints_list( + constraints, + group_decl, + scope, + HashMap::new(), + &mut diagnostics, + ) } } } @@ -1542,12 +1558,12 @@ fn compute_field_sizes(file: &parser_ast::File) -> ast::File { } /// Inline group fields and remove group declarations. -fn inline_groups(file: &mut ast::File) -> Result<(), Diagnostics> { +fn inline_groups(file: &parser_ast::File) -> Result<parser_ast::File, Diagnostics> { fn inline_fields<'a>( - fields: impl Iterator<Item = &'a ast::Field>, - groups: &HashMap<String, ast::Decl>, + fields: impl Iterator<Item = &'a parser_ast::Field>, + groups: &HashMap<String, &parser_ast::Decl>, constraints: &HashMap<String, Constraint>, - ) -> Vec<ast::Field> { + ) -> Vec<parser_ast::Field> { fields .flat_map(|field| match &field.desc { FieldDesc::Group { group_id, constraints: group_constraints } => { @@ -1560,17 +1576,17 @@ fn inline_groups(file: &mut ast::File) -> Result<(), Diagnostics> { inline_fields(groups.get(group_id).unwrap().fields(), groups, &constraints) } FieldDesc::Scalar { id, width } if constraints.contains_key(id) => { - vec![ast::Field { + vec![parser_ast::Field { desc: FieldDesc::FixedScalar { width: *width, value: constraints.get(id).unwrap().value.unwrap(), }, loc: field.loc, - annot: field.annot.clone(), + annot: field.annot, }] } FieldDesc::Typedef { id, type_id, .. 
} if constraints.contains_key(id) => { - vec![ast::Field { + vec![parser_ast::Field { desc: FieldDesc::FixedEnum { enum_id: type_id.clone(), tag_id: constraints @@ -1579,7 +1595,7 @@ fn inline_groups(file: &mut ast::File) -> Result<(), Diagnostics> { .unwrap(), }, loc: field.loc, - annot: field.annot.clone(), + annot: field.annot, }] } _ => vec![field.clone()], @@ -1587,23 +1603,50 @@ fn inline_groups(file: &mut ast::File) -> Result<(), Diagnostics> { .collect() } - let groups = utils::drain_filter(&mut file.declarations, |decl| { - matches!(&decl.desc, DeclDesc::Group { .. }) - }) - .into_iter() - .map(|decl| (decl.id().unwrap().to_owned(), decl)) - .collect::<HashMap<String, _>>(); - - for decl in file.declarations.iter_mut() { - match &mut decl.desc { - DeclDesc::Packet { fields, .. } | DeclDesc::Struct { fields, .. } => { - *fields = inline_fields(fields.iter(), &groups, &HashMap::new()) - } - _ => (), - } - } + let groups = file + .declarations + .iter() + .filter(|decl| matches!(&decl.desc, DeclDesc::Group { .. })) + .map(|decl| (decl.id().unwrap().to_owned(), decl)) + .collect::<HashMap<String, _>>(); + + let declarations = file + .declarations + .iter() + .filter_map(|decl| match &decl.desc { + DeclDesc::Packet { fields, id, parent_id, constraints } => Some(parser_ast::Decl { + desc: DeclDesc::Packet { + fields: inline_fields(fields.iter(), &groups, &HashMap::new()), + id: id.clone(), + parent_id: parent_id.clone(), + constraints: constraints.clone(), + }, + loc: decl.loc, + annot: decl.annot, + }), + DeclDesc::Struct { fields, id, parent_id, constraints } => Some(parser_ast::Decl { + desc: DeclDesc::Struct { + fields: inline_fields(fields.iter(), &groups, &HashMap::new()), + id: id.clone(), + parent_id: parent_id.clone(), + constraints: constraints.clone(), + }, + loc: decl.loc, + annot: decl.annot, + }), + DeclDesc::Group { .. 
} => None, + _ => Some(decl.clone()), + }) + .collect(); - Ok(()) + Ok(File { + declarations, + + version: file.version.clone(), + file: file.file, + comments: file.comments.clone(), + endianness: file.endianness, + }) } /// Analyzer entry point, produces a new AST with annotations resulting @@ -1613,16 +1656,17 @@ pub fn analyze(file: &parser_ast::File) -> Result<ast::File, Diagnostics> { check_decl_identifiers(file, &scope)?; check_field_identifiers(file)?; check_enum_declarations(file)?; - check_constraints(file, &scope)?; check_size_fields(file)?; check_fixed_fields(file, &scope)?; check_payload_fields(file)?; check_array_fields(file)?; check_padding_fields(file)?; check_checksum_fields(file, &scope)?; - let mut file = compute_field_sizes(file); - inline_groups(&mut file)?; - Ok(file) + check_group_constraints(file, &scope)?; + let file = inline_groups(file)?; + let scope = Scope::new(&file)?; + check_decl_constraints(&file, &scope)?; + Ok(compute_field_sizes(&file)) } #[cfg(test)] @@ -2020,6 +2064,15 @@ mod test { } "# ); + + valid!( + r#" + little_endian_packets + group A { x : 8 } + packet B { A } + packet C : B (x = 1) { } + "# + ); } #[test] @@ -45,7 +45,7 @@ pub struct SourceRange { pub trait Annotation: fmt::Debug + Serialize { type FieldAnnotation: Default + fmt::Debug + Clone; - type DeclAnnotation: Default + fmt::Debug; + type DeclAnnotation: Default + fmt::Debug + Clone; } #[derive(Debug, Serialize, Clone)] @@ -165,7 +165,7 @@ pub struct TestCase { pub input: String, } -#[derive(Debug, Serialize, PartialEq, Eq)] +#[derive(Debug, Serialize, Clone, PartialEq, Eq)] #[serde(tag = "kind")] pub enum DeclDesc<A: Annotation> { #[serde(rename = "checksum_declaration")] @@ -194,7 +194,7 @@ pub enum DeclDesc<A: Annotation> { Test { type_id: String, test_cases: Vec<TestCase> }, } -#[derive(Debug, Serialize)] +#[derive(Debug, Serialize, Clone)] pub struct Decl<A: Annotation> { pub loc: SourceRange, #[serde(skip_serializing)] @@ -203,7 +203,7 @@ pub struct Decl<A: 
Annotation> { pub desc: DeclDesc<A>, } -#[derive(Debug, Serialize)] +#[derive(Debug, Serialize, Clone)] pub struct File<A: Annotation> { pub version: String, pub file: FileId, @@ -20,4 +20,3 @@ pub mod backends; pub mod parser; #[cfg(test)] pub mod test_utils; -pub mod utils; diff --git a/src/parser.rs b/src/parser.rs index f9d2ffa..1104f68 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -21,7 +21,7 @@ use std::iter::{Filter, Peekable}; pub mod ast { use serde::Serialize; - #[derive(Debug, Serialize, Default, PartialEq, Eq)] + #[derive(Debug, Serialize, Clone, Default, PartialEq, Eq)] pub struct Annotation; impl crate::ast::Annotation for Annotation { diff --git a/src/utils.rs b/src/utils.rs deleted file mode 100644 index 0e64250..0000000 --- a/src/utils.rs +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -/// Placeholder implementation of Vec::drain_filter. -/// The feature drain_filter is currently unstable. -pub fn drain_filter<T, F>(input: &mut Vec<T>, predicate: F) -> Vec<T> -where - F: Fn(&T) -> bool, -{ - // Pass 1: compute the total number of removed elements. - let mut total_left_count = 0; - for element in input.iter() { - total_left_count += !predicate(element) as usize; - } - // Pass 2: compute the final position of each element in the input - // array in order to position left elements first and drained elements - // last, preserving the order. 
- let mut rank = Vec::with_capacity(input.len()); - let mut left_count = 0; - let mut removed_count = 0; - for element in input.iter() { - if predicate(element) { - rank.push(total_left_count + removed_count); - removed_count += 1; - } else { - rank.push(left_count); - left_count += 1; - } - } - // Pass 3: swap the elements to their final position. - let mut n = 0; - while n < input.len() { - let rank_n = rank[n]; - if n != rank_n { - input.swap(n, rank_n); - rank.swap(n, rank_n); - } else { - n += 1; - } - } - // Finally: split off the removed elements off the input vector. - input.split_off(total_left_count) -} - -#[cfg(test)] -mod test { - use crate::utils::drain_filter; - - #[test] - fn test_drain_filter() { - let mut input = vec![1, 4, 2, 5, 3, 6, 7]; - let drained = drain_filter(&mut input, |element| *element > 3); - assert_eq!(input, vec![1, 2, 3]); - assert_eq!(drained, vec![4, 5, 6, 7]); - } -} |