 #![feature(rustc_attrs)]
 #![feature(min_specialization)]
 #![feature(strict_provenance)]
+#![feature(extend_one)]
 #![recursion_limit = "256"]
 #![allow(internal_features)]
 #![deny(ffi_unwind_calls)]
@@ -44,6 +45,7 @@ pub mod bridge;
 
 mod diagnostic;
 mod escape;
+mod to_tokens;
 
 use std::ffi::CStr;
 use std::ops::{Range, RangeBounds};
@@ -53,6 +55,8 @@ use std::{error, fmt};
 
 #[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
 pub use diagnostic::{Diagnostic, Level, MultiSpan};
+#[unstable(feature = "proc_macro_totokens", issue = "130977")]
+pub use to_tokens::ToTokens;
 
 use crate::escape::{EscapeOptions, escape_bytes};
 
@@ -279,6 +283,7 @@ impl ConcatTreesHelper {
         }
     }
 
+    #[allow(dead_code)]
     fn append_to(self, stream: &mut TokenStream) {
         if self.trees.is_empty() {
             return;
@@ -325,45 +330,22 @@ impl ConcatStreamsHelper {
     }
 }
 
-/// Collects a number of token trees into a single stream.
-#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        let iter = trees.into_iter();
-        let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
-        iter.for_each(|tree| builder.push(tree));
-        builder.build()
-    }
-}
-
-/// A "flattening" operation on token streams, collects token trees
-/// from multiple token streams into a single stream.
-#[stable(feature = "proc_macro_lib", since = "1.15.0")]
-impl FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let iter = streams.into_iter();
+#[stable(feature = "proc_macro_totokens_migration", since = "CURRENT_RUSTC_VERSION")]
+impl<T: ToTokens> FromIterator<T> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = T>>(t: I) -> Self {
+        let iter = t.into_iter();
         let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
-        iter.for_each(|stream| builder.push(stream));
+        iter.for_each(|t| builder.push(t.into_token_stream()));
         builder.build()
     }
 }
 
-#[stable(feature = "token_stream_extend", since = "1.30.0")]
-impl Extend<TokenTree> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        let iter = trees.into_iter();
-        let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
-        iter.for_each(|tree| builder.push(tree));
-        builder.append_to(self);
-    }
-}
-
-#[stable(feature = "token_stream_extend", since = "1.30.0")]
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        let iter = streams.into_iter();
+#[stable(feature = "proc_macro_totokens_migration", since = "CURRENT_RUSTC_VERSION")]
+impl<T: ToTokens> Extend<T> for TokenStream {
+    fn extend<I: IntoIterator<Item = T>>(&mut self, t: I) {
+        let iter = t.into_iter();
         let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
-        iter.for_each(|stream| builder.push(stream));
+        iter.for_each(|t| builder.push(t.into_token_stream()));
         builder.append_to(self);
     }
 }
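
For readers unfamiliar with the new blanket impls, here is a minimal, hypothetical sketch of how they behave from a proc-macro author's point of view. It assumes a nightly toolchain and that `ToTokens` (the unstable trait gated by `proc_macro_totokens`, issue #130977) is implemented for `TokenTree` and `Literal`; the macro name `plus_one_two` is illustrative only and not part of this change.

```rust
// Hypothetical proc-macro crate (crate-type = "proc-macro"), nightly only.
// The feature gate may be needed while the `ToTokens` trait itself is unstable.
#![feature(proc_macro_totokens)]

extern crate proc_macro;

use proc_macro::{Literal, Punct, Spacing, TokenStream, TokenTree};

/// Expands to the expression `1u8 + 2u8`.
#[proc_macro]
pub fn plus_one_two(_input: TokenStream) -> TokenStream {
    // FromIterator<T: ToTokens>: collect an iterator of `Literal`s directly,
    // where the removed impls only accepted `TokenTree` or `TokenStream` items.
    let mut out: TokenStream = [Literal::u8_suffixed(1)].into_iter().collect();

    // Extend<T: ToTokens>: append any `ToTokens` items; each item is converted
    // with `into_token_stream()` and concatenated onto the stream.
    out.extend([TokenTree::Punct(Punct::new('+', Spacing::Alone))]);
    out.extend([Literal::u8_suffixed(2)]);
    out
}
```

The behavioral point is that the old `TokenTree`/`TokenStream` impls are subsumed rather than changed: any `T: ToTokens` item is first converted via `into_token_stream()` and then concatenated through `ConcatStreamsHelper`, so existing `.collect::<TokenStream>()` and `.extend(...)` call sites keep compiling.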