
Commit 386ed0d

use rust-style doc comments; ignore others
1 parent 46f81af commit 386ed0d

3 files changed (+106, -61 lines)

naga/src/front/wgsl/error.rs

+2 -2

@@ -377,8 +377,8 @@ impl<'a> Error<'a> {
                 Token::Arrow => "->".to_string(),
                 Token::Unknown(c) => format!("unknown (`{c}`)"),
                 Token::Trivia => "trivia".to_string(),
-                Token::Comment(s) => format!("documentation ('{s}')"),
-                Token::CommentModule(s) => format!("module documentation ('{s}')"),
+                Token::CommentDoc(s) => format!("documentation ('{s}')"),
+                Token::CommentDocModule(s) => format!("module documentation ('{s}')"),
                 Token::End => "end".to_string(),
             },
             ExpectedToken::Identifier => "identifier".to_string(),

naga/src/front/wgsl/parse/lexer.rs

+103 -58

@@ -22,8 +22,8 @@ pub enum Token<'a> {
     Arrow,
     Unknown(char),
     Trivia,
-    Comment(&'a str),
-    CommentModule(&'a str),
+    CommentDoc(&'a str),
+    CommentDocModule(&'a str),
     End,
 }
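The renamed variants make the classification explicit: CommentDoc carries `///` and `/** ... */` comments, CommentDocModule carries `//!` and `/*! ... */` comments, and plain `//` or `/* ... */` comments now lex as Trivia. As a minimal sketch of how a consumer might separate the two kinds (`split_docs` is a hypothetical helper, not part of this commit; it assumes the `Token` enum above is in scope):

    fn split_docs<'a>(tokens: &[Token<'a>]) -> (Vec<&'a str>, Vec<&'a str>) {
        let mut item_docs = Vec::new();
        let mut module_docs = Vec::new();
        for token in tokens {
            match token {
                // `///` and `/** ... */` comments document the next item.
                Token::CommentDoc(text) => item_docs.push(*text),
                // `//!` and `/*! ... */` comments document the module.
                Token::CommentDocModule(text) => module_docs.push(*text),
                // Trivia and all other tokens carry no documentation.
                _ => {}
            }
        }
        (item_docs, module_docs)
    }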

@@ -83,46 +83,64 @@ fn consume_token(input: &str, generic: bool, save_comments: bool) -> (Token<'_>,
     let og_chars = chars.as_str();
     match chars.next() {
         Some('/') => {
-            if let Some(end_position) = input
-                .char_indices()
-                .find(|char_indices| is_comment_end(char_indices.1))
-            {
-                if !save_comments {
-                    return (Token::Trivia, &input[end_position.0..]);
+            let end_position = {
+                if let Some(end_position) = input
+                    .char_indices()
+                    .find(|char_indices| is_comment_end(char_indices.1))
+                {
+                    end_position.0
+                } else {
+                    input.len()
                 }
-                let end_position = end_position.0;
-                return (
-                    if chars.next() == Some('!') {
-                        Token::CommentModule(&input[..end_position])
-                    } else {
-                        Token::Comment(&input[..end_position])
-                    },
-                    &input[end_position..],
-                );
-            }
+            };
             if !save_comments {
-                return (Token::Trivia, "");
+                return (Token::Trivia, &input[end_position..]);
             }
-            (Token::Comment(input), "")
+            let next_char = chars.next();
+            (
+                match next_char {
+                    Some('/') => Token::CommentDoc(&input[..end_position]),
+                    Some('!') => Token::CommentDocModule(&input[..end_position]),
+                    _ => Token::Trivia,
+                },
+                &input[end_position..],
+            )
         }
         Some('*') => {
             let mut depth = 1;
             let mut prev = None;
             let mut char_indices = input.char_indices();
+
             // Skip '/' and '*'
             char_indices.next();
             char_indices.next();
+
+            let mut constructing_token = if !save_comments {
+                Token::Trivia
+            } else {
+                let mut peeker = char_indices.clone().peekable();
+                let peeked_next_char = peeker.peek();
+                let peeked_next_char =
+                    peeked_next_char.map(|peeked_next_char| peeked_next_char.1);
+                match peeked_next_char {
+                    Some('*') => Token::CommentDoc(""),
+                    Some('!') => Token::CommentDocModule(""),
+                    _ => Token::Trivia,
+                }
+            };
             for (index, c) in char_indices {
                 match (prev, c) {
                     (Some('*'), '/') => {
                         prev = None;
                         depth -= 1;
                         if depth == 0 {
-                            if !save_comments {
-                                return (Token::Trivia, &input[(index + 1)..]);
+                            if let Token::CommentDoc(ref mut doc)
+                            | Token::CommentDocModule(ref mut doc) = constructing_token
+                            {
+                                *doc = &input[..=index];
                             }
-                            let doc = &input[..=index];
-                            return (Token::Comment(doc), &input[(index + 1)..]);
+
+                            return (constructing_token, &input[(index + 1)..]);
                         }
                     }
                     (Some('/'), '*') => {
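Reduced to its essence, consume_token now decides what a comment is from its opening characters, mirroring Rust's doc-comment conventions. A standalone sketch of the rule (`classify` is illustrative only and returns the variant name instead of a real Token):

    fn classify(comment: &str) -> &'static str {
        if comment.starts_with("///") || comment.starts_with("/**") {
            "CommentDoc" // rust-style outer doc comment
        } else if comment.starts_with("//!") || comment.starts_with("/*!") {
            "CommentDocModule" // rust-style inner (module) doc comment
        } else {
            "Trivia" // plain `//` or `/* ... */` comment: ignored
        }
    }

    fn main() {
        assert_eq!(classify("/// item docs"), "CommentDoc");
        assert_eq!(classify("/** block docs */"), "CommentDoc");
        assert_eq!(classify("//! module docs"), "CommentDocModule");
        assert_eq!(classify("/*! module docs */"), "CommentDocModule");
        assert_eq!(classify("// just a note"), "Trivia");
        assert_eq!(classify("/* just a note */"), "Trivia");
    }

The real lexer additionally honors save_comments (every comment becomes Trivia when it is off) and tracks nesting depth for block comments.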
@@ -288,7 +306,7 @@ impl<'a> Lexer<'a> {
         loop {
             // Eat all trivia because `next` doesn't eat trailing trivia.
             let (token, rest) = consume_token(self.input, false, self.save_comments);
-            if let Token::Trivia | Token::Comment(_) | Token::CommentModule(_) = token {
+            if let Token::Trivia | Token::CommentDoc(_) | Token::CommentDocModule(_) = token {
                 self.input = rest;
             } else {
                 return self.current_byte_offset();
@@ -311,13 +329,13 @@ impl<'a> Lexer<'a> {
             let start = self.current_byte_offset();
             // Eat all trivia because `next` doesn't eat trailing trivia.
             let (token, rest) = consume_token(self.input, false, self.save_comments);
-            if let Token::Comment(_) = token {
+            if let Token::CommentDoc(_) = token {
                 self.input = rest;
                 let next = self.current_byte_offset();
                 comments.push(Span::new(start as u32, next as u32));
             } else if let Token::Trivia = token {
                 self.input = rest;
-            } else if let Token::CommentModule(_) = token {
+            } else if let Token::CommentDocModule(_) = token {
                 self.input = rest;
             } else {
                 return self.current_byte_offset();
@@ -359,7 +377,7 @@ impl<'a> Lexer<'a> {
             |token| {
                 !matches!(
                     token,
-                    Token::Trivia | Token::Comment(_) | Token::CommentModule(_)
+                    Token::Trivia | Token::CommentDoc(_) | Token::CommentDocModule(_)
                 )
             },
             generic,
@@ -564,7 +582,7 @@ fn sub_test_with_and_without_comments(source: &str, expected_tokens: &[Token]) {
         source,
         expected_tokens
             .iter()
-            .filter(|v| !matches!(v, Token::Comment(_) | Token::CommentModule(_)))
+            .filter(|v| !matches!(**v, Token::CommentDoc(_) | Token::CommentDocModule(_)))
             .cloned()
             .collect::<Vec<_>>()
             .as_slice(),
@@ -801,9 +819,8 @@ fn test_tokens() {
         "*/*/***/*//=/*****//",
         &[
             Token::Operation('*'),
-            Token::Comment("/*/***/*/"),
             Token::AssignmentOperation('/'),
-            Token::Comment("/*****/"),
+            Token::CommentDoc("/*****/"),
             Token::Operation('/'),
         ],
     );
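The dropped expectation shows the behavior change in miniature: `/*/***/*/` opens with `/*` followed by `/`, so it is a plain block comment and now lexes as trivia, while `/*****/` opens with `/**` and is kept as a doc comment. In terms of the hypothetical classify sketch above:

    assert_eq!(classify("/*/***/*/"), "Trivia"); // third char is '/', not '*' or '!'
    assert_eq!(classify("/*****/"), "CommentDoc"); // opens with `/**`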
@@ -871,47 +888,70 @@ fn test_variable_decl() {
 }
 
 #[test]
-fn test_comments() {
-    sub_test_with_and_without_comments("// Single comment", &[Token::Comment("// Single comment")]);
+fn test_comments_trivia() {
+    sub_test_with_and_without_comments("// Single comment", &[]);
+
     sub_test_with_and_without_comments(
         "/* multi
 line
 comment */",
-        &[Token::Comment(
-            "/* multi
+        &[],
+    );
+    sub_test_with_and_without_comments(
+        "/* multi
+line
+comment */
+// and another",
+        &[],
+    );
+}
+
+#[test]
+fn test_comments() {
+    sub_test_with_and_without_comments(
+        "/// Single comment",
+        &[Token::CommentDoc("/// Single comment")],
+    );
+
+    sub_test_with_and_without_comments(
+        "/** multi
+line
+comment */",
+        &[Token::CommentDoc(
+            "/** multi
 line
 comment */",
         )],
     );
     sub_test_with_and_without_comments(
-        "/* multi
+        "/** multi
 line
 comment */
-// and another",
+/// and another",
         &[
-            Token::Comment(
-                "/* multi
+            Token::CommentDoc(
+                "/** multi
 line
 comment */",
             ),
-            Token::Comment("// and another"),
+            Token::CommentDoc("/// and another"),
         ],
     );
 }
 
 #[test]
 fn test_comment_nested() {
     sub_test_with_and_without_comments(
-        "/*
-        a comment with nested one /*
+        "/**
+        a comment with nested one /**
         nested comment
         */
         */
         const a : i32 = 2;",
         &[
-            Token::Comment(
-                "/*
-        a comment with nested one /*
+            Token::CommentDoc(
+                "/**
+        a comment with nested one /**
         nested comment
         */
         */",
@@ -930,14 +970,14 @@ fn test_comment_nested() {
 #[test]
 fn test_comment_long_character() {
     sub_test_with_and_without_comments(
-        "// π/2
-        // D(𝐡) = ───────────────────────────────────────────────────
-        // παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`
+        "/// π/2
+        /// D(𝐡) = ───────────────────────────────────────────────────
+        /// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`
         const a : i32 = 2;",
         &[
-            Token::Comment("// π/2"),
-            Token::Comment("// D(𝐡) = ───────────────────────────────────────────────────"),
-            Token::Comment("// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`"),
+            Token::CommentDoc("/// π/2"),
+            Token::CommentDoc("/// D(𝐡) = ───────────────────────────────────────────────────"),
+            Token::CommentDoc("/// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`"),
             Token::Word("const"),
             Token::Word("a"),
             Token::Separator(':'),
@@ -950,22 +990,27 @@ fn test_comment_long_character() {
 }
 
 #[test]
-fn test_module_comments() {
+fn test_comments_module() {
     sub_test_with_and_without_comments(
         "//! Comment Module
         //! Another one.
-        // Trying to break module comment
+        /*! Different module comment */
+        /// Trying to break module comment
+        // Trying to break module comment again
         //! After a regular comment is ok.
+        /*! Different module comment again */
 
         //! After a break is supported.
         const
         //! After anything else is not.",
         &[
-            Token::CommentModule("//! Comment Module"),
-            Token::CommentModule("//! Another one."),
-            Token::Comment("// Trying to break module comment"),
-            Token::CommentModule("//! After a regular comment is ok."),
-            Token::CommentModule("//! After a break is supported."),
+            Token::CommentDocModule("//! Comment Module"),
+            Token::CommentDocModule("//! Another one."),
+            Token::CommentDocModule("/*! Different module comment */"),
+            Token::CommentDoc("/// Trying to break module comment"),
+            Token::CommentDocModule("//! After a regular comment is ok."),
+            Token::CommentDocModule("/*! Different module comment again */"),
+            Token::CommentDocModule("//! After a break is supported."),
             Token::Word("const"),
         ],
     );

naga/src/front/wgsl/parse/mod.rs

+1 -1

@@ -2860,7 +2860,7 @@ impl Parser {
             let token = cloned.next_until(|_| true, false);
             token
         }
-        while let (Token::CommentModule(_), span) = peek_any_next(&lexer) {
+        while let (Token::CommentDocModule(_), span) = peek_any_next(&lexer) {
             comments.push(lexer.source.index(span));
             let _ = lexer.next_until(|_| true, false);
         }
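The loop peeks before consuming, so the first token that is not a module doc comment is left in place for the normal parse path. A simplified sketch of the same pattern over a plain token slice (`take_module_docs` is a hypothetical stand-in for the parser's cloned-lexer machinery, again assuming the `Token` enum from lexer.rs):

    use std::iter::Peekable;
    use std::slice::Iter;

    fn take_module_docs<'a>(tokens: &mut Peekable<Iter<'_, Token<'a>>>) -> Vec<&'a str> {
        let mut docs = Vec::new();
        // Stop at the first token that is not a `//!`-style comment.
        while let Some(Token::CommentDocModule(text)) = tokens.peek() {
            docs.push(*text);
            tokens.next(); // consume only after a successful peek
        }
        docs
    }

Given the tokens `[CommentDocModule("//! Docs."), Word("const")]`, this returns `["//! Docs."]` and leaves `Word("const")` as the next token, matching the test above where module docs stop at `const`.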
