use std::fs;
use std::path::Path;
use std::sync::Arc;

use p4::check::Diagnostics;
use p4::{
    ast::AST, check, error, error::SemanticError, lexer, parser, preprocessor,
};
use proc_macro::TokenStream;
use serde::Deserialize;
use serde_tokenstream::ParseWrapper;
use syn::{parse, LitStr};

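/// Key/value settings accepted by `use_p4!`: the path to the P4 source file
/// and the pipeline name passed through to code generation.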
#[derive(Deserialize)]
struct MacroSettings {
    p4: ParseWrapper<LitStr>,
    pipeline_name: ParseWrapper<LitStr>,
}

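/// Settings used during code generation. When `use_p4!` is invoked with only
/// a file name, the pipeline name defaults to `"main"`.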
struct GenerationSettings {
    pipeline_name: String,
}

impl Default for GenerationSettings {
    fn default() -> Self {
        Self {
            pipeline_name: "main".to_owned(),
        }
    }
}

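/// Generate Rust code from a P4 program at compile time.
///
/// The macro accepts either a single string literal naming the P4 source
/// file, or key/value settings that also select the generated pipeline name.
/// A sketch of both forms (file path and pipeline name are hypothetical):
///
/// ```ignore
/// use_p4!("p4/example.p4");
///
/// use_p4!(p4 = "p4/example.p4", pipeline_name = "example");
/// ```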
#[proc_macro]
pub fn use_p4(item: TokenStream) -> TokenStream {
    match do_use_p4(item) {
        Err(err) => err.to_compile_error().into(),
        Ok(out) => out,
    }
}

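/// Parse the macro input as either a bare string literal or `MacroSettings`,
/// then hand the file name and settings to the code generator.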
fn do_use_p4(item: TokenStream) -> Result<TokenStream, syn::Error> {
    let (filename, settings) =
        if let Ok(filename) = parse::<LitStr>(item.clone()) {
            (filename.value(), GenerationSettings::default())
        } else {
            let MacroSettings { p4, pipeline_name } =
                serde_tokenstream::from_tokenstream(&item.into())?;
            (
                p4.into_inner().value(),
                GenerationSettings {
                    pipeline_name: pipeline_name.into_inner().value(),
                },
            )
        };

    generate_rs(filename, settings)
}

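/// Parse and check the P4 program, then emit Rust code for it as a token
/// stream.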
fn generate_rs(
    filename: String,
    settings: GenerationSettings,
) -> Result<TokenStream, syn::Error> {
    let mut ast = AST::default();
    process_file(Arc::new(filename), &mut ast, &settings)?;

    let (hlir, _) = check::all(&ast);

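    // Emit Rust code for the requested pipeline via the p4_rust code
    // generator.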
    let tokens: TokenStream = p4_rust::emit_tokens(
        &ast,
        &hlir,
        p4_rust::Settings {
            pipeline_name: settings.pipeline_name.clone(),
        },
    )
    .into();

    Ok(tokens)
}

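/// Read a P4 source file, run the preprocessor, recursively process any
/// included files, and parse the result into the shared AST.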
fn process_file(
    filename: Arc<String>,
    ast: &mut AST,
    _settings: &GenerationSettings,
) -> Result<(), syn::Error> {
    let contents = match fs::read_to_string(&*filename) {
        Ok(c) => c,
        Err(e) => panic!("failed to read file {}: {}", filename, e),
    };
    let ppr = preprocessor::run(&contents, filename.clone()).unwrap();
    for included in &ppr.elements.includes {
        let path = Path::new(included);
        if !path.is_absolute() {
            let parent = Path::new(&*filename).parent().unwrap();
            let joined = parent.join(included);
            process_file(
                Arc::new(joined.to_str().unwrap().to_string()),
                ast,
                _settings,
            )?
        } else {
            process_file(Arc::new(included.clone()), ast, _settings)?;
        }
    }

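    // Check the AST accumulated from the includes and abort on semantic
    // errors, then lex and parse this file's preprocessed lines into the
    // shared AST.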
    let (_, diags) = check::all(ast);
    let lines: Vec<&str> = ppr.lines.iter().map(|x| x.as_str()).collect();
    check(&lines, &diags);
    let lxr = lexer::Lexer::new(lines.clone(), filename);
    let mut psr = parser::Parser::new(lxr);
    psr.run(ast).unwrap();
    p4_rust::sanitize(ast);
    Ok(())
}

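/// If the diagnostics contain any errors, collect them as semantic errors
/// with source context and panic, surfacing them as a compile-time failure
/// of the macro invocation.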
fn check(lines: &[&str], diagnostics: &Diagnostics) {
    let errors = diagnostics.errors();
    if !errors.is_empty() {
        let mut err = Vec::new();
        for e in errors {
            err.push(SemanticError {
                at: e.token.clone(),
                message: e.message.clone(),
                source: lines[e.token.line].into(),
            });
        }
        panic!("{}", error::Error::Semantic(err));
    }
}