
Searched refs:tokens (Results 1 – 5 of 5) sorted by relevance

/rust/macros/
paste.rs
    5  fn concat_helper(tokens: &[TokenTree]) -> Vec<(String, Span)> {  in concat_helper()
    6  let mut tokens = tokens.iter();  in concat_helper()
   10  match tokens.next() {  in concat_helper()
   61  let segments = concat_helper(tokens);  in concat()
   66  pub(crate) fn expand(tokens: &mut Vec<TokenTree>) {  in expand()
   67  for token in tokens.iter_mut() {  in expand()
   91  for i in (0..tokens.len().saturating_sub(3)).rev() {  in expand()
   94  (&tokens[i + 1], &tokens[i + 2]),  in expand()
   98  match &tokens[i + 3] {  in expand()
  105  match &tokens[i] {  in expand()
  [all …]
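A minimal sketch of the helper shape the paste.rs matches point at: walking a slice of TokenTree values and flattening identifiers and literals into (String, Span) pairs so a later stage can concatenate them while keeping the caller's spans for diagnostics. The function and variable names below are illustrative rather than the kernel's exact code, and the sketch assumes it lives in a proc-macro crate where the proc_macro API is available.

    use proc_macro::{Span, TokenTree};

    // Collect the textual form of each identifier or literal, remembering the
    // span of the token it came from so errors still point at the caller's source.
    fn collect_segments(tokens: &[TokenTree]) -> Vec<(String, Span)> {
        let mut segments = Vec::new();
        for token in tokens {
            match token {
                TokenTree::Ident(ident) => segments.push((ident.to_string(), ident.span())),
                TokenTree::Literal(lit) => segments.push((lit.to_string(), lit.span())),
                // Punctuation and groups are ignored in this simplified walk.
                _ => {}
            }
        }
        segments
    }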
quote.rs
    7  fn to_tokens(&self, tokens: &mut TokenStream);  in to_tokens()
   11  fn to_tokens(&self, tokens: &mut TokenStream) {  in to_tokens()
   13  v.to_tokens(tokens);  in to_tokens()
   19  fn to_tokens(&self, tokens: &mut TokenStream) {  in to_tokens()
   25  fn to_tokens(&self, tokens: &mut TokenStream) {  in to_tokens()
   31  fn to_tokens(&self, tokens: &mut TokenStream) {  in to_tokens()
   32  tokens.extend([self.clone()]);  in to_tokens()
   37  fn to_tokens(&self, tokens: &mut TokenStream) {  in to_tokens()
   38  tokens.extend(self.clone());  in to_tokens()
   53  tokens = ::std::vec::Vec::new();
  [all …]
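A hedged sketch of the trait shape the quote.rs matches suggest: a ToTokens trait whose implementors append themselves to a mutable TokenStream, with Option<T> delegating to its contents and TokenTree/TokenStream extending the stream with clones of themselves. The trait name mirrors the matches above; the specific impls are illustrative, not a copy of the kernel file.

    use proc_macro::{TokenStream, TokenTree};

    pub(crate) trait ToTokens {
        fn to_tokens(&self, tokens: &mut TokenStream);
    }

    impl<T: ToTokens> ToTokens for Option<T> {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `None` contributes nothing; `Some(v)` defers to the inner value.
            if let Some(v) = self {
                v.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for TokenTree {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // A single token is appended by extending with a one-element iterator.
            tokens.extend([self.clone()]);
        }
    }

    impl ToTokens for TokenStream {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // A whole stream is appended token by token.
            tokens.extend(self.clone());
        }
    }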
vtable.rs
    8  let mut tokens: Vec<_> = ts.into_iter().collect();  in vtable()
   11  let is_trait = tokens  in vtable()
   24  let body = match tokens.pop() {  in vtable()
   94  tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, new_body)));  in vtable()
   95  tokens.into_iter().collect()  in vtable()
kunit.rs
   21  let mut tokens: Vec<_> = ts.into_iter().collect();  in kunit_tests()
   24  tokens  in kunit_tests()
   36  let body = match tokens.pop() {  in kunit_tests()
   62  tokens.insert(  in kunit_tests()
  179  tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, final_body)));  in kunit_tests()
  181  tokens.into_iter().collect()  in kunit_tests()
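The vtable.rs and kunit.rs matches share one shape: collect the incoming TokenStream into a Vec<TokenTree>, pop the trailing brace-delimited Group that holds the item body, rebuild that body, then push a new Group and collect everything back. A minimal sketch of that shape follows; the function name rebuild_body and the inject parameter are hypothetical, standing in for whatever tokens the real macros splice in.

    use proc_macro::{Delimiter, Group, TokenStream, TokenTree};

    // Illustrative attribute-macro body: rebuild the item's `{ ... }` block.
    fn rebuild_body(ts: TokenStream, inject: TokenStream) -> TokenStream {
        let mut tokens: Vec<_> = ts.into_iter().collect();

        // The last token of the item is expected to be its brace-delimited body.
        let body = match tokens.pop() {
            Some(TokenTree::Group(group)) if group.delimiter() == Delimiter::Brace => group,
            _ => panic!("expected a brace-delimited body"),
        };

        // Build the new body: injected tokens first, then the original contents.
        let mut new_body = TokenStream::new();
        new_body.extend(inject);
        new_body.extend(body.stream());

        // Put the rebuilt body back and reassemble the item.
        tokens.push(TokenTree::Group(Group::new(Delimiter::Brace, new_body)));
        tokens.into_iter().collect()
    }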
lib.rs
  397  let mut tokens = input.into_iter().collect();  in paste()
  398  paste::expand(&mut tokens);  in paste()
  399  tokens.into_iter().collect()  in paste()
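The lib.rs match is the entry point of that pattern: collect the input into a Vec<TokenTree>, hand it to an in-place rewriter, and collect the result back into the returned TokenStream. A hedged sketch of that entry-point shape, assuming a proc-macro crate; the local expand() helper here is a hypothetical stand-in for the expand() matched in paste.rs above.

    use proc_macro::{TokenStream, TokenTree};

    // Hypothetical in-place rewriter standing in for the real paste expansion.
    fn expand(_tokens: &mut Vec<TokenTree>) {
        // ... rewrite the token list in place ...
    }

    #[proc_macro]
    pub fn paste(input: TokenStream) -> TokenStream {
        // Proc macros that need random access collect the stream into a Vec,
        // mutate it in place, and then rebuild the output stream.
        let mut tokens: Vec<_> = input.into_iter().collect();
        expand(&mut tokens);
        tokens.into_iter().collect()
    }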

Completed in 5 milliseconds