// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Rust no-allocation backend
//!
//! The motivation for this backend is to be more "idiomatic" than the
//! existing backend. Specifically, it should:
//! 1. Use lifetimes, not reference counting
//! 2. Avoid expensive memory copies unless needed
//! 3. Use the intermediate Schema rather than reimplementing all the logic from scratch
//!
//! One notable consequence is that we avoid .specialize(), as it has "magic" behavior
//! not defined in the spec. Instead we mimic the C++ approach of calling tryParse() and
//! getting a Result<> back.
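//!
//! For illustration only (the names here are hypothetical, not the exact
//! generated identifiers), parsing is expected to follow this pattern:
//!
//! ```ignore
//! // Parsing borrows the input bytes instead of copying them.
//! let packet = MyPacket::try_parse(&bytes)?; // -> Result<MyPacket<'_>, ParseError>
//! ```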

mod computed_values;
mod enums;
mod packet_parser;
mod packet_serializer;
pub mod test;
mod utils;

use std::collections::HashMap;

use proc_macro2::TokenStream;
use quote::quote;

use crate::ast;

use self::{
    enums::generate_enum, packet_parser::generate_packet,
    packet_serializer::generate_packet_serializer,
};

use super::intermediate::Schema;

pub fn generate(file: &ast::File, schema: &Schema) -> Result<String, String> {
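    // Only little-endian generation is implemented; bail out early on
    // anything else rather than emitting incorrect code.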
    match file.endianness.value {
        ast::EndiannessValue::LittleEndian => {}
        _ => unimplemented!("Only little_endian endianness supported"),
    };

    let mut out = String::new();

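    // Embed the contents of preamble.rs verbatim at the top of the
    // generated output.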
    out.push_str(include_str!("preamble.rs"));

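    // Map each parent packet/struct id to the ids of its direct children;
    // serializer generation uses this to handle derived packets.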
    let mut children = HashMap::<&str, Vec<&str>>::new();
    for decl in &file.declarations {
        match &decl.desc {
            ast::DeclDesc::Packet { id, parent_id: Some(parent_id), .. }
            | ast::DeclDesc::Struct { id, parent_id: Some(parent_id), .. } => {
                children.entry(parent_id.as_str()).or_default().push(id.as_str());
            }
            _ => {}
        }
    }

    let declarations = file
        .declarations
        .iter()
        .map(|decl| generate_decl(decl, schema, &children))
        .collect::<Result<TokenStream, _>>()?;

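    // Round-trip the generated tokens through syn so prettyplease can
    // format the output for readability.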
    let syntax_tree = syn::parse2(declarations).expect("Could not parse code");
    out.push_str(&prettyplease::unparse(&syntax_tree));

    Ok(out)
}

fn generate_decl(
    decl: &ast::Decl,
    schema: &Schema,
    children: &HashMap<&str, Vec<&str>>,
) -> Result<TokenStream, String> {
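    // Enums generate a single type definition; packets and structs generate
    // both a parser and a serializer.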
    match &decl.desc {
        ast::DeclDesc::Enum { id, tags, width, .. } => Ok(generate_enum(id, tags, *width)),
        ast::DeclDesc::Packet { id, fields, parent_id, .. }
        | ast::DeclDesc::Struct { id, fields, parent_id, .. } => {
            let parser = generate_packet(
                id,
                fields,
                parent_id.as_deref(),
                schema,
                &schema.packets_and_structs[id.as_str()],
            )?;
            let serializer = generate_packet_serializer(
                id,
                parent_id.as_deref(),
                fields,
                schema,
                &schema.packets_and_structs[id.as_str()],
                children,
            );
            Ok(quote! {
                #parser
                #serializer
            })
        }
        _ => unimplemented!("Unsupported decl type"),
    }
}