
Commit cfb1765

Author: kek kek kek
feat: Add multi-line comments (#1936)
lex block comments
1 parent 2cb177a commit cfb1765

1 file changed, +71 -0 lines:
crates/noirc_frontend/src/lexer/lexer.rs
@@ -176,6 +176,9 @@ impl<'a> Lexer<'a> {
         if self.peek_char_is('/') {
             self.next_char();
             return self.parse_comment();
+        } else if self.peek_char_is('*') {
+            self.next_char();
+            return self.parse_block_comment();
         }
         Ok(spanned_prev_token)
     }
@@ -335,6 +338,31 @@ impl<'a> Lexer<'a> {
         self.next_token()
     }

+    fn parse_block_comment(&mut self) -> SpannedTokenResult {
+        let mut depth = 1usize;
+
+        while let Some(ch) = self.next_char() {
+            match ch {
+                '/' if self.peek_char_is('*') => {
+                    self.next_char();
+                    depth += 1;
+                }
+                '*' if self.peek_char_is('/') => {
+                    self.next_char();
+                    depth -= 1;
+
+                    // This block comment is closed, so for a construction like "/* */ */" the first "*/" ends the comment and the trailing "*/" is lexed on its own.
+                    if depth == 0 {
+                        break;
+                    }
+                }
+                _ => {}
+            }
+        }
+
+        self.next_token()
+    }
+
     /// Skips white space. They are not significant in the source language
     fn eat_whitespace(&mut self) {
         self.eat_while(None, |ch| ch.is_whitespace());
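
The new parse_block_comment leans on a single depth counter: every nested "/*" increments it, every "*/" decrements it, and normal lexing resumes only once the depth falls back to zero. Below is a minimal standalone sketch of the same idea, written against a plain peekable char iterator rather than the actual Lexer API (the helper name and signature are illustrative only):

use std::iter::Peekable;
use std::str::Chars;

/// Skip a (possibly nested) block comment. Assumes the opening "/*" has
/// already been consumed, mirroring how next_token consumes '/' and '*'
/// before dispatching to parse_block_comment in the hunk above.
/// Returns true if the comment was closed, false if the input ran out first.
fn skip_block_comment(chars: &mut Peekable<Chars<'_>>) -> bool {
    let mut depth = 1usize;
    while let Some(ch) = chars.next() {
        match ch {
            // A nested "/*" opens another level.
            '/' if chars.peek() == Some(&'*') => {
                chars.next();
                depth += 1;
            }
            // A "*/" closes the innermost level; once the depth reaches
            // zero the whole comment is finished.
            '*' if chars.peek() == Some(&'/') => {
                chars.next();
                depth -= 1;
                if depth == 0 {
                    return true;
                }
            }
            _ => {}
        }
    }
    false // ran out of input: unterminated block comment
}

fn main() {
    let src = "/* outer /* inner */ still outer */ let x = 5";
    let mut chars = src.chars().peekable();
    chars.next(); // consume '/'
    chars.next(); // consume '*'
    assert!(skip_block_comment(&mut chars));
    assert_eq!(chars.collect::<String>(), " let x = 5");
}

As in the diff, an unmatched trailing "*/" is not swallowed: skipping stops at the first balancing "*/", and whatever follows is handed back to ordinary lexing.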
@@ -480,6 +508,49 @@ fn test_comment() {
     }
 }

+#[test]
+fn test_block_comment() {
+    let input = "
+    /* comment */
+    let x = 5
+    /* comment */
+    ";
+
+    let expected = vec![
+        Token::Keyword(Keyword::Let),
+        Token::Ident("x".to_string()),
+        Token::Assign,
+        Token::Int(FieldElement::from(5_i128)),
+    ];
+
+    let mut lexer = Lexer::new(input);
+    for token in expected.into_iter() {
+        let first_lexer_output = lexer.next_token().unwrap();
+        assert_eq!(first_lexer_output, token);
+    }
+}
+
+#[test]
+fn test_nested_block_comments() {
+    let input = "
+    /* /* */ /** */ /*! */ */
+    let x = 5
+    /* /* */ /** */ /*! */ */
+    ";
+
+    let expected = vec![
+        Token::Keyword(Keyword::Let),
+        Token::Ident("x".to_string()),
+        Token::Assign,
+        Token::Int(FieldElement::from(5_i128)),
+    ];
+
+    let mut lexer = Lexer::new(input);
+    for token in expected.into_iter() {
+        let first_lexer_output = lexer.next_token().unwrap();
+        assert_eq!(first_lexer_output, token);
+    }
+}
 #[test]
 fn test_eat_string_literal() {
     let input = "let _word = \"hello\"";
