// rstml/node/parser_ext.rs
1use proc_macro2::{TokenStream, TokenTree};
2use proc_macro2_diagnostics::{Diagnostic, Level};
3use syn::{
4 parse::{discouraged::Speculative, Parse, ParseStream, Parser},
5 spanned::Spanned,
6};
7
8use crate::recoverable::{ParseRecoverable, RecoverableContext};
9
10impl RecoverableContext {
11 /// Like [`parse_simple`], but splits the tokenstream at `E` first only
12 /// parsing the tokens before it as `T`.
13 ///
14 /// **Note:** This is an internal function exported to make parsing of
15 /// custom nodes easier. It has some quirks, e.g.,
16 /// `parse_simple_until<Expr, Token![>]>`, would not support any
17 /// [`Expr`] containing a `>`.
18 ///
19 /// It is not considered stable.
20 ///
21 /// [`parse_simple`]: #method.parse_simple
22 /// [`Expr`]: https://docs.rs/syn/latest/syn/enum.Expr.html
23 pub fn parse_simple_until<T: Parse, E: Parse>(&mut self, input: ParseStream) -> Option<(T, E)> {
24 let mut tokens = TokenStream::new();
25 let res = loop {
26 // Use fork, because we can't limit separator to be only Peekable for custom
27 // tokens but we also need to parse complex expressions like
28 // "foo=x/y" or "/>"
29 let fork = input.fork();
30 if let Ok(end) = fork.parse() {
31 input.advance_to(&fork);
32 break Some(end);
33 }
34
35 if input.is_empty() {
36 break None;
37 }
38
39 let next: TokenTree = self
40 .parse_simple(input)
41 .expect("TokenTree should always be parsable");
42 tokens.extend([next]);
43 };
44 res.and_then(|res| {
45 self.save_diagnostics(syn::parse2(tokens))
46 .map(|val| (val, res))
47 })
48 }
49
50 /// Parse array of toknes using recoverable parser.
51 /// Stops parsing when other branch could parse anything.
52 ///
53 /// **Note:** This is an internal function exported to make parsing of
54 /// custom nodes easier.
55 /// It is not considered stable.
56 ///
57 /// Example:
58 /// ```ignore
59 /// # use syn::{parse::{Parser, ParseStream}, Ident, Result, parse_macro_input, Token};
60 /// # use rstml::{parse_tokens_until};
61 /// # fn main() -> syn::Result<()>{
62 /// let tokens:proc_macro2::TokenStream = quote::quote!(few idents seperated by spaces and then minus sign - that will stop parsing).into();
63 /// let concat_idents_without_minus = |input: ParseStream| -> Result<String> {
64 /// let (idents, _minus) = parser.parse_tokens_until::<Ident, _, _>(input, |i|
65 /// i.parse::<Token![-]>()
66 /// )?;
67 /// let mut new_str = String::new();
68 /// for ident in idents {
69 /// new_str.push_str(&ident.to_string())
70 /// }
71 /// // .. skip rest idents in input
72 /// # while !input.is_empty() {
73 /// # input.parse::<Ident>()?;
74 /// # }
75 /// Ok(new_str)
76 /// };
77 /// let concated = concat_idents_without_minus.parse2(tokens)?;
78 /// assert_eq!(concated, "fewidentsseperatedbyspacesandthenminussign");
79 /// # Ok(())
80 /// # }
81 /// ```
82 pub fn parse_tokens_until_call<T, F, U>(
83 &mut self,
84 input: ParseStream,
85 stop_fn: F,
86 ) -> (Vec<T>, Option<U>)
87 where
88 T: ParseRecoverable + Spanned,
89 F: Fn(ParseStream) -> syn::Result<U>,
90 {
91 let mut collection = vec![];
92 let res = loop {
93 let old_cursor = input.cursor();
94 let fork = input.fork();
95 if let Ok(res) = stop_fn(&fork) {
96 input.advance_to(&fork);
97 break Some(res);
98 }
99 if let Some(o) = self.parse_recoverable(input) {
100 collection.push(o)
101 }
102
103 if old_cursor == input.cursor() {
104 break None;
105 }
106 };
107 (collection, res)
108 }
109 /// Two-phase parsing, firstly find separator, and then parses array of
110 /// tokens before separator.
111 /// For simple input this method will work like
112 /// `parse_tokens_until`.
113 /// Internally it creates intermediate `TokenStream`` and
114 /// copy of all tokens until separator token is found. It is usefull
115 /// when separator (or it's part) can be treated as part of token T.
116 ///
117 ///
118 /// **Note:** This is an internal function exported to make parsing of
119 /// custom nodes easier.
120 /// It is not considered stable.
121 ///
122 /// Example:
123 /// ```ignore
124 /// let tokens = quote!(some_expr_seperated + with - lt_gt * tokens <> other part);
125 /// ```
126 /// In this example "<" can can be parsed as part of expression, but we want
127 /// to split tokens after "<>" was found. So instead of parsing all
128 /// input as expression, firstly we need to seperate it into two chunks.
129 pub fn parse_tokens_with_conflicted_ending<T, F, U>(
130 &mut self,
131 input: ParseStream,
132 separator: F,
133 ) -> (Vec<T>, Option<U>)
134 where
135 T: ParseRecoverable,
136 F: Fn(ParseStream) -> syn::Result<U>,
137 {
138 let parser = |parser: &mut Self, tokens: TokenStream| {
139 let parse = |input: ParseStream| {
140 let mut collection = vec![];
141
142 while !input.is_empty() {
143 let old_cursor = input.cursor();
144 if let Some(o) = parser.parse_recoverable(input) {
145 collection.push(o)
146 }
147 if old_cursor == input.cursor() {
148 break;
149 }
150 }
151 let eated_tokens = input.parse::<TokenStream>()?;
152 Ok((collection, eated_tokens))
153 };
154 let (collection, eaten_tokens) = parse.parse2(tokens).expect("No errors allowed");
155 if !eaten_tokens.is_empty() {
156 parser.push_diagnostic(Diagnostic::spanned(
157 eaten_tokens.span(),
158 Level::Error,
159 "tokens was ignored during parsing",
160 ))
161 }
162 collection
163 };
164 self.parse_with_ending(input, parser, separator)
165 }
166
167 pub(crate) fn parse_with_ending<F, CNV, V, U>(
168 &mut self,
169 input: ParseStream,
170 parser: CNV,
171 ending: F,
172 ) -> (V, Option<U>)
173 where
174 F: Fn(ParseStream) -> syn::Result<U>,
175 CNV: Fn(&mut Self, TokenStream) -> V,
176 {
177 let mut tokens = TokenStream::new();
178 let res = loop {
179 // Use fork, because we can't limit separator to be only Peekable for custom
180 // tokens but we also need to parse complex expressions like
181 // "foo=x/y" or "/>"
182 let fork = input.fork();
183 if let Ok(end) = ending(&fork) {
184 input.advance_to(&fork);
185 break Some(end);
186 }
187
188 if input.is_empty() {
189 break None;
190 }
191
192 let next: TokenTree = self
193 .parse_simple(input)
194 .expect("TokenTree should always be parsable");
195 tokens.extend([next]);
196 };
197 (parser(self, tokens), res)
198 }
199}