Commit 5c93492
Remove the `Option` in `TokenStream`.
Removing the `Option` means an allocation is now required to create an empty `TokenStream`, but all other operations become simpler and marginally faster because they no longer have to check for `None`. Overall it simplifies the code for a negligible performance effect. The commit also removes `TokenStream::empty` in favour of a derived `Default` implementation, which is now possible.
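For orientation, a minimal standalone sketch of the representational change follows. The `Lrc` alias, the placeholder `TokenTree`, and the simplified `TreeAndJoint` element type are stand-ins for illustration only, not the real rustc definitions:

// Stand-ins for illustration; rustc's real types live in syntax::tokenstream.
use std::sync::Arc as Lrc; // rustc's `Lrc` is `Arc` or `Rc` depending on the build

#[derive(Clone, Debug)]
struct TokenTree;                       // placeholder for the real token tree
type TreeAndJoint = (TokenTree, bool);  // the real second element is `IsJoint`

// Before: `None` avoided allocating for an empty stream, but every operation
// had to handle both variants.
#[allow(dead_code)]
#[derive(Clone, Debug)]
struct OldTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);

// After: always a shared (possibly empty) vector. `Default` can simply be
// derived because `Lrc<Vec<_>>` is `Default`, so `TokenStream::default()`
// replaces the old `TokenStream::empty()` constructor.
#[derive(Clone, Debug, Default)]
struct TokenStream(Lrc<Vec<TreeAndJoint>>);

fn main() {
    // Creating an empty stream now allocates the empty vec behind the `Lrc`...
    let ts = TokenStream::default();
    // ...but consumers no longer need a `match`/`if let` on an `Option`.
    assert!(ts.0.is_empty());
}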
1 parent: 20cc752

9 files changed, +101 -145 lines changed

src/libsyntax/attr/mod.rs

Lines changed: 1 addition & 1 deletion

@@ -551,7 +551,7 @@ impl MetaItem {
 impl MetaItemKind {
     pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
-            MetaItemKind::Word => TokenStream::empty(),
+            MetaItemKind::Word => TokenStream::default(),
             MetaItemKind::NameValue(ref lit) => {
                 let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);

src/libsyntax/ext/expand.rs

Lines changed: 2 additions & 2 deletions

@@ -671,12 +671,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
             }
             Some(TokenTree::Token(..)) => {}
-            None => return TokenStream::empty(),
+            None => return TokenStream::default(),
         }
         self.cx.span_err(span, "custom attribute invocations must be \
             of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
             followed by a delimiter token");
-        TokenStream::empty()
+        TokenStream::default()
     }
 
     fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {

src/libsyntax/ext/mbe/transcribe.rs

Lines changed: 1 addition & 1 deletion

@@ -95,7 +95,7 @@ pub(super) fn transcribe(
 ) -> TokenStream {
     // Nothing for us to transcribe...
     if src.is_empty() {
-        return TokenStream::empty();
+        return TokenStream::default();
     }
 
     // We descend into the RHS (`src`), expanding things as we go. This stack contains the things

src/libsyntax/ext/placeholders.rs

Lines changed: 1 addition & 1 deletion

@@ -15,7 +15,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
     fn mac_placeholder() -> ast::Mac {
         ast::Mac {
             path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
-            tts: TokenStream::empty().into(),
+            tts: TokenStream::default().into(),
             delim: ast::MacDelimiter::Brace,
             span: DUMMY_SP,
             prior_type_ascription: None,

src/libsyntax/ext/proc_macro_server.rs

Lines changed: 1 addition & 1 deletion

@@ -393,7 +393,7 @@ impl server::Types for Rustc<'_> {
 
 impl server::TokenStream for Rustc<'_> {
     fn new(&mut self) -> Self::TokenStream {
-        TokenStream::empty()
+        TokenStream::default()
     }
     fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()

src/libsyntax/mut_visit.rs

Lines changed: 2 additions & 4 deletions

@@ -610,10 +610,8 @@ pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
 }
 
 pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
-    visit_opt(tts, |tts| {
-        let tts = Lrc::make_mut(tts);
-        visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
-    })
+    let tts = Lrc::make_mut(tts);
+    visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
 }
 
 // Applies ident visitor if it's an ident; applies other visits to interpolated nodes.

src/libsyntax/parse/attr.rs

Lines changed: 1 addition & 1 deletion

@@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
             };
             TokenStream::from_streams(smallvec![eq.into(), tokens])
         } else {
-            TokenStream::empty()
+            TokenStream::default()
         };
         ast::AttrItem { path, tokens }
     })

src/libsyntax/tokenstream.rs

Lines changed: 91 additions & 133 deletions

@@ -137,13 +137,8 @@ impl TokenTree {
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
-///
-/// The use of `Option` is an optimization that avoids the need for an
-/// allocation when the stream is empty. However, it is not guaranteed that an
-/// empty stream is represented with `None`; it may be represented as a `Some`
-/// around an empty `Vec`.
-#[derive(Clone, Debug)]
-pub struct TokenStream(pub Option<Lrc<Vec<TreeAndJoint>>>);
+#[derive(Clone, Debug, Default)]
+pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);
 
 pub type TreeAndJoint = (TokenTree, IsJoint);
 
@@ -164,36 +159,34 @@ impl TokenStream {
     /// separating the two arguments with a comma for diagnostic suggestions.
     pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
         // Used to suggest if a user writes `foo!(a b);`
-        if let Some(ref stream) = self.0 {
-            let mut suggestion = None;
-            let mut iter = stream.iter().enumerate().peekable();
-            while let Some((pos, ts)) = iter.next() {
-                if let Some((_, next)) = iter.peek() {
-                    let sp = match (&ts, &next) {
-                        (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
-                        ((TokenTree::Token(token_left), NonJoint),
-                         (TokenTree::Token(token_right), _))
-                        if ((token_left.is_ident() && !token_left.is_reserved_ident())
-                             || token_left.is_lit()) &&
-                            ((token_right.is_ident() && !token_right.is_reserved_ident())
-                             || token_right.is_lit()) => token_left.span,
-                        ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
-                        _ => continue,
-                    };
-                    let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
-                    suggestion = Some((pos, comma, sp));
-                }
-            }
-            if let Some((pos, comma, sp)) = suggestion {
-                let mut new_stream = vec![];
-                let parts = stream.split_at(pos + 1);
-                new_stream.extend_from_slice(parts.0);
-                new_stream.push(comma);
-                new_stream.extend_from_slice(parts.1);
-                return Some((TokenStream::new(new_stream), sp));
+        let mut suggestion = None;
+        let mut iter = self.0.iter().enumerate().peekable();
+        while let Some((pos, ts)) = iter.next() {
+            if let Some((_, next)) = iter.peek() {
+                let sp = match (&ts, &next) {
+                    (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                    ((TokenTree::Token(token_left), NonJoint),
+                     (TokenTree::Token(token_right), _))
+                    if ((token_left.is_ident() && !token_left.is_reserved_ident())
+                         || token_left.is_lit()) &&
+                        ((token_right.is_ident() && !token_right.is_reserved_ident())
+                         || token_right.is_lit()) => token_left.span,
+                    ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
+                    _ => continue,
+                };
+                let sp = sp.shrink_to_hi();
+                let comma = (TokenTree::token(token::Comma, sp), NonJoint);
+                suggestion = Some((pos, comma, sp));
             }
         }
+        if let Some((pos, comma, sp)) = suggestion {
+            let mut new_stream = vec![];
+            let parts = self.0.split_at(pos + 1);
+            new_stream.extend_from_slice(parts.0);
+            new_stream.push(comma);
+            new_stream.extend_from_slice(parts.1);
+            return Some((TokenStream::new(new_stream), sp));
+        }
         None
     }
 }
@@ -225,28 +218,21 @@ impl PartialEq<TokenStream> for TokenStream {
 }
 
 impl TokenStream {
-    pub fn len(&self) -> usize {
-        if let Some(ref slice) = self.0 {
-            slice.len()
-        } else {
-            0
-        }
+    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
+        TokenStream(Lrc::new(streams))
     }
 
-    pub fn empty() -> TokenStream {
-        TokenStream(None)
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
     }
 
-    pub fn is_empty(&self) -> bool {
-        match self.0 {
-            None => true,
-            Some(ref stream) => stream.is_empty(),
-        }
+    pub fn len(&self) -> usize {
+        self.0.len()
     }
 
     pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
         match streams.len() {
-            0 => TokenStream::empty(),
+            0 => TokenStream::default(),
             1 => streams.pop().unwrap(),
             _ => {
                 // We are going to extend the first stream in `streams` with
@@ -270,41 +256,24 @@ impl TokenStream {
                 // Get the first stream. If it's `None`, create an empty
                 // stream.
                 let mut iter = streams.drain();
-                let mut first_stream_lrc = match iter.next().unwrap().0 {
-                    Some(first_stream_lrc) => first_stream_lrc,
-                    None => Lrc::new(vec![]),
-                };
+                let mut first_stream_lrc = iter.next().unwrap().0;
 
                 // Append the elements to the first stream, after reserving
                 // space for them.
                 let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
                 first_vec_mut.reserve(num_appends);
                 for stream in iter {
-                    if let Some(stream) = stream.0 {
-                        first_vec_mut.extend(stream.iter().cloned());
-                    }
+                    first_vec_mut.extend(stream.0.iter().cloned());
                 }
 
                 // Create the final `TokenStream`.
-                match first_vec_mut.len() {
-                    0 => TokenStream(None),
-                    _ => TokenStream(Some(first_stream_lrc)),
-                }
+                TokenStream(first_stream_lrc)
             }
         }
     }
 
-    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
-        match streams.len() {
-            0 => TokenStream(None),
-            _ => TokenStream(Some(Lrc::new(streams))),
-        }
-    }
-
     pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
-        if let Some(stream) = self.0 {
-            vec.extend(stream.iter().cloned());
-        }
+        vec.extend(self.0.iter().cloned());
     }
 
     pub fn trees(&self) -> Cursor {
@@ -371,24 +340,22 @@ impl TokenStream {
     }
 
     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .enumerate()
-                    .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .enumerate()
+                .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
+                .collect()
+        ))
     }
 
     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
+                .collect()
+        ))
     }
 }
 
@@ -406,44 +373,43 @@ impl TokenStreamBuilder {
 
         // If `self` is not empty and the last tree within the last stream is a
        // token tree marked with `Joint`...
-        if let Some(TokenStream(Some(ref mut last_stream_lrc))) = self.0.last_mut() {
+        if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
             if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {
 
                 // ...and `stream` is not empty and the first tree within it is
                 // a token tree...
-                if let TokenStream(Some(ref mut stream_lrc)) = stream {
-                    if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
-
-                        // ...and the two tokens can be glued together...
-                        if let Some(glued_tok) = last_token.glue(&token) {
-
-                            // ...then do so, by overwriting the last token
-                            // tree in `self` and removing the first token tree
-                            // from `stream`. This requires using `make_mut()`
-                            // on the last stream in `self` and on `stream`,
-                            // and in practice this doesn't cause cloning 99.9%
-                            // of the time.
-
-                            // Overwrite the last token tree with the merged
-                            // token.
-                            let last_vec_mut = Lrc::make_mut(last_stream_lrc);
-                            *last_vec_mut.last_mut().unwrap() =
-                                (TokenTree::Token(glued_tok), *is_joint);
-
-                            // Remove the first token tree from `stream`. (This
-                            // is almost always the only tree in `stream`.)
-                            let stream_vec_mut = Lrc::make_mut(stream_lrc);
-                            stream_vec_mut.remove(0);
-
-                            // Don't push `stream` if it's empty -- that could
-                            // block subsequent token gluing, by getting
-                            // between two token trees that should be glued
-                            // together.
-                            if !stream.is_empty() {
-                                self.0.push(stream);
-                            }
-                            return;
+                let TokenStream(ref mut stream_lrc) = stream;
+                if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
+
+                    // ...and the two tokens can be glued together...
+                    if let Some(glued_tok) = last_token.glue(&token) {
+
+                        // ...then do so, by overwriting the last token
+                        // tree in `self` and removing the first token tree
+                        // from `stream`. This requires using `make_mut()`
+                        // on the last stream in `self` and on `stream`,
+                        // and in practice this doesn't cause cloning 99.9%
+                        // of the time.
+
+                        // Overwrite the last token tree with the merged
+                        // token.
+                        let last_vec_mut = Lrc::make_mut(last_stream_lrc);
+                        *last_vec_mut.last_mut().unwrap() =
+                            (TokenTree::Token(glued_tok), *is_joint);
+
+                        // Remove the first token tree from `stream`. (This
+                        // is almost always the only tree in `stream`.)
+                        let stream_vec_mut = Lrc::make_mut(stream_lrc);
+                        stream_vec_mut.remove(0);
+
+                        // Don't push `stream` if it's empty -- that could
+                        // block subsequent token gluing, by getting
+                        // between two token trees that should be glued
+                        // together.
+                        if !stream.is_empty() {
+                            self.0.push(stream);
                         }
+                        return;
                     }
                 }
             }
@@ -476,16 +442,11 @@ impl Cursor {
     }
 
     pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => {
-                if self.index < stream.len() {
-                    self.index += 1;
-                    Some(stream[self.index - 1].clone())
-                } else {
-                    None
-                }
-            }
+        if self.index < self.stream.len() {
+            self.index += 1;
+            Some(self.stream.0[self.index - 1].clone())
+        } else {
+            None
         }
     }
 
@@ -494,16 +455,13 @@ impl Cursor {
             return;
         }
         let index = self.index;
-        let stream = mem::replace(&mut self.stream, TokenStream(None));
+        let stream = mem::take(&mut self.stream);
         *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
         self.index = index;
     }
 
     pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
-        }
+        self.stream.0[self.index ..].get(n).map(|(tree, _)| tree.clone())
     }
 }

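One small point worth calling out from the `Cursor` hunk above: `mem::replace(&mut self.stream, TokenStream(None))` becomes `mem::take(&mut self.stream)`, which only works because `TokenStream` now implements `Default`. A minimal sketch of that pattern, using a hypothetical stand-in type rather than the real one:

use std::mem;

// Hypothetical stand-in for a type that gains a `Default` impl.
#[derive(Default, Debug)]
struct Stream(Vec<u32>);

fn steal(slot: &mut Stream) -> Stream {
    // `mem::take` moves the value out and leaves `Stream::default()` behind;
    // with the old `Option`-based layout the equivalent was
    // `mem::replace(slot, Stream(None))`.
    mem::take(slot)
}

fn main() {
    let mut s = Stream(vec![1, 2, 3]);
    let taken = steal(&mut s);
    assert_eq!(taken.0, vec![1, 2, 3]);
    assert!(s.0.is_empty()); // the slot was reset to the default (empty) stream
}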
src/libsyntax_ext/plugin_macro_defs.rs

Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ fn plugin_macro_def(name: Name, span: Span) -> P<Item> {
         attr::mk_word_item(Ident::new(sym::rustc_builtin_macro, span)));
 
     let parens: TreeAndJoint = TokenTree::Delimited(
-        DelimSpan::from_single(span), token::Paren, TokenStream::empty()
+        DelimSpan::from_single(span), token::Paren, TokenStream::default()
     ).into();
     let trees = vec![parens.clone(), TokenTree::token(token::FatArrow, span).into(), parens];