
Commit d532160

Include comment text in token
1 parent ba427c8 commit d532160

4 files changed: 11 additions & 10 deletions

compiler/parser/python.lalrpop

Lines changed: 1 addition & 1 deletion
@@ -1463,6 +1463,6 @@ extern {
         name => lexer::Tok::Name { name: <String> },
         "\n" => lexer::Tok::Newline,
         ";" => lexer::Tok::Semi,
-        "#" => lexer::Tok::Comment,
+        "#" => lexer::Tok::Comment(_),
     }
 }
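The grammar only needs to know that a comment terminal exists, so it discards the new payload with a wildcard, while the parser.rs hunks below spell the same thing as Tok::Comment { .. } and Tok::Comment(_). A minimal standalone sketch (a stand-in enum, not the crate's real Tok) of why every pattern must now acknowledge the payload, and how code that wants the text binds it instead:

// Minimal stand-in for the real enum, just to show the pattern forms involved.
enum Tok {
    Comment(String),
    Newline,
}

// The grammar and parser.rs only ask "is this a comment?", so any of the
// payload-ignoring spellings works: Comment(_), Comment(..), Comment { .. }.
fn is_comment(tok: &Tok) -> bool {
    matches!(tok, Tok::Comment(_)) || matches!(tok, Tok::Comment { .. })
}

// Code that wants the text binds the payload instead of discarding it.
fn comment_text(tok: &Tok) -> Option<&str> {
    match tok {
        Tok::Comment(text) => Some(text),
        _ => None,
    }
}

fn main() {
    let tok = Tok::Comment("# hi".to_string());
    assert!(is_comment(&tok));
    assert_eq!(comment_text(&tok), Some("# hi"));
    assert!(!is_comment(&Tok::Newline));
}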

compiler/parser/src/lexer.rs

Lines changed: 6 additions & 5 deletions
@@ -444,16 +444,17 @@ where
     /// Skip everything until end of line
     fn lex_comment(&mut self) -> LexResult {
         let start_pos = self.get_pos();
-        self.next_char();
+        let mut value = String::new();
+        value.push(self.next_char().unwrap());
         loop {
             match self.window[0] {
                 Some('\n') | None => {
                     let end_pos = self.get_pos();
-                    return Ok((start_pos, Tok::Comment, end_pos));
+                    return Ok((start_pos, Tok::Comment(value), end_pos));
                 }
                 Some(_) => {}
             }
-            self.next_char();
+            value.push(self.next_char().unwrap());
         }
     }

@@ -1266,7 +1267,7 @@ mod tests {
             fn $name() {
                 let source = format!(r"99232 # {}", $eol);
                 let tokens = lex_source(&source);
-                assert_eq!(tokens, vec![Tok::Int { value: BigInt::from(99232) }, Tok::Comment, Tok::Newline]);
+                assert_eq!(tokens, vec![Tok::Int { value: BigInt::from(99232) }, Tok::Comment(format!("# {}", $eol)), Tok::Newline]);
             }
         )*
     }

@@ -1290,7 +1291,7 @@ mod tests {
             tokens,
             vec![
                 Tok::Int { value: BigInt::from(123) },
-                Tok::Comment,
+                Tok::Comment("# Foo".to_string()),
                 Tok::Newline,
                 Tok::Int { value: BigInt::from(456) },
                 Tok::Newline,
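lex_comment previously threw the characters away; it now pushes them, starting with the leading '#', into a String and returns that String inside the token, stopping before the newline so the line ending still becomes its own Tok::Newline (the updated tests above reflect both points). A simplified standalone sketch of that accumulation, using an assumed helper over a plain char iterator rather than the crate's lexer state:

use std::iter::Peekable;
use std::str::Chars;

// Simplified sketch, assuming the iterator is positioned at the '#' character:
// collect everything up to (but not including) the end of line.
fn read_comment(chars: &mut Peekable<Chars<'_>>) -> String {
    let mut value = String::new();
    while let Some(&c) = chars.peek() {
        if c == '\n' {
            break; // the newline stays in the stream and becomes its own token
        }
        value.push(c);
        chars.next();
    }
    value
}

fn main() {
    let source = "# a comment\nx = 1";
    let mut chars = source.chars().peekable();
    assert_eq!(read_comment(&mut chars), "# a comment");
    assert_eq!(chars.next(), Some('\n')); // newline left for the caller
}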

compiler/parser/src/parser.rs

Lines changed: 2 additions & 2 deletions
@@ -96,7 +96,7 @@ pub fn parse_located(
     let marker_token = (Default::default(), mode.to_marker(), Default::default());
     let tokenizer = iter::once(Ok(marker_token))
         .chain(lxr)
-        .filter_ok(|(_, tok, _)| !matches!(tok, Tok::Comment));
+        .filter_ok(|(_, tok, _)| !matches!(tok, Tok::Comment { .. }));

     python::TopParser::new()
         .parse(tokenizer)
@@ -112,7 +112,7 @@ pub fn parse_tokens(
     let marker_token = (Default::default(), mode.to_marker(), Default::default());
     let tokenizer = iter::once(Ok(marker_token))
         .chain(lxr)
-        .filter_ok(|(_, tok, _)| !matches!(tok, Tok::Comment));
+        .filter_ok(|(_, tok, _)| !matches!(tok, Tok::Comment(_)));

     python::TopParser::new()
         .parse(tokenizer)
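Both parse entry points still filter comments out before the grammar sees them, so the new text is only available to callers that inspect the token stream themselves before handing it to the parser. A hedged sketch of how a caller might collect comment text ahead of parsing; the (start, tok, end) triple shape comes from the hunks above, while Loc, the error type, and split_comments are stand-ins invented for the example:

// Stand-ins for the example only; the real types live in rustpython-parser.
enum Tok {
    Comment(String),
    Newline,
}

type Loc = usize;
type LexResult = Result<(Loc, Tok, Loc), String>;

// Pull the comment text out of a token stream while passing every other
// token (and every lexer error) straight through, mirroring what a caller
// could do in front of parse_tokens now that comments carry their text.
fn split_comments(
    lexer: impl Iterator<Item = LexResult>,
    comments: &mut Vec<String>,
) -> Vec<LexResult> {
    lexer
        .filter(|item| match item {
            Ok((_, Tok::Comment(text), _)) => {
                comments.push(text.clone());
                false // drop it from the stream the parser will see
            }
            _ => true,
        })
        .collect()
}

fn main() {
    let stream = vec![
        Ok((0, Tok::Comment("# note".to_string()), 6)),
        Ok((6, Tok::Newline, 7)),
    ];
    let mut comments = Vec::new();
    let rest = split_comments(stream.into_iter(), &mut comments);
    assert_eq!(comments, vec!["# note".to_string()]);
    assert_eq!(rest.len(), 1); // only the newline remains
}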

compiler/parser/src/token.rs

Lines changed: 2 additions & 2 deletions
@@ -37,7 +37,7 @@ pub enum Tok {
     Rsqb,
     Colon,
     Comma,
-    Comment,
+    Comment(String),
     Semi,
     Plus,
     Minus,
@@ -148,7 +148,7 @@ impl fmt::Display for Tok {
             Rsqb => f.write_str("']'"),
             Colon => f.write_str("':'"),
             Comma => f.write_str("','"),
-            Comment => f.write_str("#"),
+            Comment(value) => f.write_str(value),
             Semi => f.write_str("';'"),
             Plus => f.write_str("'+'"),
             Minus => f.write_str("'-'"),
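With the payload in place, Display reproduces the stored comment verbatim instead of printing a bare "#". A small self-contained example of the new arm's behaviour, using a local stand-in for the enum rather than the real rustpython-parser type:

use std::fmt;

// Local stand-in for the relevant piece of the real enum, only to show what
// the updated Display arm produces.
enum Tok {
    Comment(String),
}

impl fmt::Display for Tok {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Before this commit the arm wrote a literal "#"; now it echoes
            // the stored text, e.g. "# TODO: remove".
            Tok::Comment(value) => f.write_str(value),
        }
    }
}

fn main() {
    let tok = Tok::Comment("# TODO: remove".to_string());
    assert_eq!(tok.to_string(), "# TODO: remove");
}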
