use proc_macro2 as p;
use proc_macro2::{Span, TokenStream, TokenTree};
use std::iter::Peekable;

mod builder;
mod generated;
mod inner;

use self::builder::Builder;
use self::inner::*;

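/// Expander for the `quote!` macro.
///
/// `cx` and `stream` hold the identifier names used in generated code for the
/// two arguments of the emitted closure (presumably the macro context and the
/// output token stream).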
pub struct Quote {
    cx: &'static str,
    stream: &'static str,
}

impl Quote {
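    /// Construct a new expander using the default argument identifiers.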
    pub fn new() -> Self {
        Self {
            cx: "__rune_macros_ctx",
            stream: "__rune_macros_stream",
        }
    }

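    /// Expand `input` into the output token stream.
    ///
    /// The expansion has roughly the shape
    /// `<MACROS>::quote_fn(move |<cx>, <stream>| { <encoded tokens>; Ok(()) })`,
    /// where the encoded tokens are produced by `process`.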
    pub fn parse(&self, input: TokenStream) -> syn::Result<TokenStream> {
        let mut output = self.process(input)?;
        output.push(("Ok", p(p(()))));

        let arg = (
            ("move", '|', self.cx, ',', self.stream, '|'),
            braced(output),
        );

        let mut output = Builder::new();
        output.push((MACROS, S, "quote_fn", p(arg)));
        Ok(output.into_stream())
    }

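    /// Translate every token in `input` into code that reconstructs the same
    /// token at runtime, appending the result to the returned builder.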
    fn process(&self, input: TokenStream) -> syn::Result<Builder> {
        let mut output = Builder::new();

        let mut stack = vec![(p::Delimiter::None, input.into_iter().peekable())];

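        // Depth-first walk over the input; each stack frame remembers the
        // delimiter that opened it so the matching close token can be emitted
        // once the frame's iterator is exhausted.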
        while let Some((_, it)) = stack.last_mut() {
            let Some(tt) = it.next() else {
                let Some((d, _)) = stack.pop() else {
                    return Err(syn::Error::new(Span::call_site(), "stack is empty"));
                };

                if let Some(variant) = Delimiter::from_proc_macro(d) {
                    self.encode_to_tokens(
                        Span::call_site(),
                        &mut output,
                        (Kind("Close"), p(variant)),
                    );
                }

                continue;
            };

            match tt {
                TokenTree::Group(group) => {
                    if let Some(variant) = Delimiter::from_proc_macro(group.delimiter()) {
                        self.encode_to_tokens(
                            group.span(),
                            &mut output,
                            (Kind("Open"), p(variant)),
                        );
                    }

                    stack.push((group.delimiter(), group.stream().into_iter().peekable()));
                }
                TokenTree::Ident(ident) => {
                    if ident == "_" {
                        self.encode_to_tokens(ident.span(), &mut output, Kind("Underscore"));
                        continue;
                    }

                    let string = ident.to_string();

                    let kind = match generated::kind_from_ident(string.as_str()) {
                        Some(kind) => kind,
                        None => {
                            self.encode_to_tokens(
                                ident.span(),
                                &mut output,
                                NewIdent(self.cx, &string),
                            );
                            continue;
                        }
                    };

                    self.encode_to_tokens(ident.span(), &mut output, kind);
                }
                TokenTree::Punct(punct) => {
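                    // `#` may begin an expansion; if it does, it has already
                    // been encoded by `try_parse_expansion`.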
                    if punct.as_char() == '#'
                        && self.try_parse_expansion(&punct, &mut output, it)?
                    {
                        continue;
                    }

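                    // Collect up to three joint punctuation characters and
                    // look up the combined token kind.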
                    let mut buf = ['\0'; 3];
                    consume_punct(&punct, it, buf.iter_mut());

                    let kind = match generated::kind_from_punct(&buf) {
                        Some(kind) => kind,
                        _ => {
                            return Err(syn::Error::new(punct.span(), "unsupported punctuation"));
                        }
                    };

                    self.encode_to_tokens(punct.span(), &mut output, kind);
                }
                TokenTree::Literal(lit) => {
                    self.encode_to_tokens(lit.span(), &mut output, NewLit(self.cx, lit));
                }
            }
        }

        Ok(output)
    }

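    /// Try to parse an expansion following a `#` token: either `#<ident>`,
    /// which interpolates a single value, or `#(<expr>)<sep>*`, which iterates
    /// over `<expr>` and encodes `<sep>` between consecutive items.
    ///
    /// Returns `Ok(true)` and advances `it` past the expansion if one was
    /// recognized, otherwise returns `Ok(false)` and leaves `it` untouched.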
    fn try_parse_expansion(
        &self,
        punct: &p::Punct,
        output: &mut Builder,
        it: &mut Peekable<impl Iterator<Item = TokenTree> + Clone>,
    ) -> syn::Result<bool> {
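        // Look ahead on a clone; `it` is only advanced once the expansion is
        // known to be well-formed.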
        let mut lh = it.clone();

        let next = match lh.next() {
            Some(next) => next,
            None => return Ok(false),
        };

        match next {
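            // `#<ident>`: interpolate the value bound to `ident`.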
            TokenTree::Ident(ident) => {
                self.encode_to_tokens(punct.span(), output, ident);
            }
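            // `#(<expr>)<sep>*`: a repetition over the parenthesized
            // expression, separated by `<sep>`.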
            TokenTree::Group(group) if group.delimiter() == p::Delimiter::Parenthesis => {
                let group = group.stream();

                let sep = match (lh.next(), lh.next()) {
                    (Some(sep), Some(TokenTree::Punct(p))) if p.as_char() == '*' => sep,
                    _ => return Ok(false),
                };

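                // Emit `let mut it = IntoIterator::into_iter(&<expr>).peekable();`
                // followed by a `while let` loop that encodes each item and,
                // while more items remain, the separator.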
                output.push((
                    ("let", "mut", "it"),
                    '=',
                    ("IntoIterator", S, "into_iter", p(('&', group))),
                    ('.', "peekable", p(())),
                    ';',
                ));

                let body = (
                    (
                        ToTokensFn,
                        p(('&', "value", ',', self.cx, ',', self.stream)),
                        '?',
                        ';',
                    ),
                    ("if", "it", '.', "peek", p(()), '.', "is_some", p(())),
                    braced(self.process(TokenStream::from(sep))?),
                );

                output.push((
                    ("while", "let", "Some", p("value")),
                    '=',
                    ("it", '.', "next", p(()), braced(body)),
                ));

                it.next();
                it.next();
                it.next();
                return Ok(true);
            }
            _ => return Ok(false),
        }

        it.next();
        Ok(true)
    }

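    /// Push a spanned call that appends `tokens` to the output stream in the
    /// generated code.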
    fn encode_to_tokens(&self, span: Span, output: &mut Builder, tokens: impl ToTokens) {
        output.push_spanned(
            span,
            (
                ToTokensFn,
                p(('&', tokens, ',', self.cx, ',', self.stream)),
                ('?', ';'),
            ),
        );
    }
}

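/// Fill `out` with the characters of a (possibly multi-character) punctuation
/// token, starting from `initial` and consuming further punctuation from `it`
/// as long as the spacing is `Joint` and `out` has room.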
fn consume_punct<'o>(
    initial: &p::Punct,
    it: &mut Peekable<impl Iterator<Item = TokenTree>>,
    mut out: impl Iterator<Item = &'o mut char>,
) {
    *out.next().unwrap() = initial.as_char();

    if !matches!(initial.spacing(), p::Spacing::Joint) {
        return;
    }

    for o in out {
        let (spacing, ch) = match it.peek() {
            Some(TokenTree::Punct(p)) => (p.spacing(), p.as_char()),
            _ => break,
        };

        *o = ch;

        it.next();
        if !matches!(spacing, p::Spacing::Joint) {
            break;
        }
    }
}