// tstr_proc_macros/lib.rs
1#![allow(clippy::or_fun_call)]
2#![allow(clippy::useless_conversion)]
3
4extern crate proc_macro;
5
6#[cfg(not(feature = "proc_macro2_"))]
7use proc_macro as used_proc_macro;
8
9#[cfg(feature = "proc_macro2_")]
10use proc_macro2 as used_proc_macro;
11
12use std::iter;
13
14#[allow(unused_imports)]
15use used_proc_macro::{
16 Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
17};
18
19#[cfg(feature = "syn_")]
20mod use_syn;
21
22#[cfg(not(feature = "syn_"))]
23mod non_syn_parsing;
24
25#[cfg(not(feature = "const_generics"))]
26mod nested_tuple_compute;
27
28mod utils;
29
30#[cfg(all(feature = "min_const_generics", not(feature = "const_generics")))]
31mod min_const_generics;
32
33#[cfg(all(feature = "min_const_generics", not(feature = "const_generics")))]
34use min_const_generics::output_tstr_param;
35
36#[cfg(not(feature = "min_const_generics"))]
37mod no_const_generics;
38
39#[cfg(not(feature = "min_const_generics"))]
40use no_const_generics::output_tstr_param;
41
42#[doc(hidden)]
43#[proc_macro]
44pub fn __ts_impl(input_tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
45 use crate::utils::{paren, punct_token};
46
47 let input_tokens = TokenStream::from(input_tokens);
48
49 #[cfg(feature = "syn_")]
50 let parsed = syn::parse2::<Inputs>(input_tokens);
51
52 #[cfg(not(feature = "syn_"))]
53 let parsed = non_syn_parsing::parse_inputs(input_tokens);
54
55 match parsed {
56 Ok(Inputs {
57 crate_path,
58 strings,
59 }) => {
60 let mut out = TokenStream::new();
61 if strings.len() == 1 {
62 output_tstr(&crate_path, &strings[0], &mut out);
63 } else {
64 let tt = paren(Span::call_site(), |out| {
65 for tstr in &strings {
66 output_tstr(&crate_path, tstr, out);
67 out.extend(punct_token(',', tstr.span));
68 }
69 });
70 out.extend(iter::once(tt));
71 }
72 out
73 }
74 Err(e) => e.to_compile_error(),
75 }
76 .into()
77}
78
79fn output_tstr(crate_path: &TokenStream, tstr: &TStr, out: &mut TokenStream) {
80 use crate::utils::{colon2_token, ident_token, punct_token};
81
82 let span = tstr.span;
83 out.extend(crate_path.clone());
84 out.extend(colon2_token(span));
85 out.extend(ident_token("TStr", span));
86 out.extend(punct_token('<', span));
87
88 #[cfg(feature = "const_generics")]
89 {
90 out.extend(crate_path.clone());
91 out.extend(colon2_token(span));
92 out.extend(ident_token("___", span));
93 out.extend(punct_token('<', span));
94 }
95
96 output_tstr_param(crate_path, tstr, out);
97
98 #[cfg(feature = "const_generics")]
99 {
100 out.extend(punct_token('>', span));
101 }
102
103 out.extend(punct_token('>', span));
104}
105
#[cfg(feature = "const_generics")]
/// Emits the `TStr` type parameter for the full-const-generics encoding:
/// just the string literal itself (the caller wraps it in `___< ... >`).
///
/// `_crate_path` is unused here but kept so all three feature-gated
/// `output_tstr_param` variants share one signature.
fn output_tstr_param(_crate_path: &TokenStream, tstr: &TStr, out: &mut TokenStream) {
    let span = tstr.span;

    // `Literal::string` takes `&str`; borrow the owned string directly
    // instead of going through an intermediate `&&str` binding.
    let mut lit = Literal::string(&tstr.string);
    lit.set_span(span);
    out.extend(iter::once(TokenTree::from(lit)));
}
115
/// The fully parsed arguments of the `__ts_impl` macro invocation.
struct Inputs {
    // Path tokens for the `tstr` crate root; prepended to every item the
    // macro emits so the expansion resolves regardless of how the crate
    // was renamed by the caller.
    crate_path: TokenStream,
    // One entry per string argument; each becomes a `TStr<...>` type.
    strings: Vec<TStr>,
}
120
/// A single string argument to the macro, plus the span it came from
/// (used so the emitted tokens point back at the user's input).
struct TStr {
    string: String,
    span: Span,
}