@@ -19,13 +19,47 @@ pub struct TokenStream<'a> {
19
19
}
20
20
21
21
impl < ' a > TokenStream < ' a > {
22
+ /// Special handling for a tool directive of the form
23
+ /// ```vhdl
24
+ /// `identifier { any chars until newline }
25
+ /// ```
26
+ /// This needs special handling as the text that follows the identifier is arbitrary.
27
+ fn handle_tool_directive (
28
+ grave_accent : Token ,
29
+ tokenizer : & mut Tokenizer ,
30
+ diagnostics : & mut dyn DiagnosticHandler ,
31
+ ) {
32
+ let start_pos = grave_accent. pos . clone ( ) ;
33
+ match tokenizer. pop ( ) {
34
+ Ok ( Some ( tok) ) => {
35
+ if tok. kind != Identifier {
36
+ diagnostics. error ( tok, "Expecting identifier" ) ;
37
+ let _ = tokenizer. text_until_newline ( ) ; // skip potentially invalid tokens
38
+ return ;
39
+ }
40
+ }
41
+ Err ( err) => diagnostics. push ( err) ,
42
+ Ok ( None ) => {
43
+ diagnostics. error ( start_pos, "Expecting identifier" ) ;
44
+ return ;
45
+ }
46
+ }
47
+ match tokenizer. text_until_newline ( ) {
48
+ Ok ( _) => { }
49
+ Err ( err) => diagnostics. push ( err) ,
50
+ }
51
+ }
52
+
22
53
pub fn new (
23
54
mut tokenizer : Tokenizer < ' a > ,
24
55
diagnostics : & mut dyn DiagnosticHandler ,
25
56
) -> TokenStream < ' a > {
26
57
let mut tokens = Vec :: new ( ) ;
27
58
loop {
28
59
match tokenizer. pop ( ) {
60
+ Ok ( Some ( token) ) if token. kind == GraveAccent => {
61
+ TokenStream :: handle_tool_directive ( token, & mut tokenizer, diagnostics)
62
+ }
29
63
Ok ( Some ( token) ) => tokens. push ( token) ,
30
64
Ok ( None ) => break ,
31
65
Err ( err) => diagnostics. push ( err) ,
@@ -259,6 +293,12 @@ mod tests {
259
293
let tokenizer = Tokenizer :: new( & $code. symbols, source, ContentReader :: new( & contents) ) ;
260
294
let $stream = TokenStream :: new( tokenizer, & mut NoDiagnostics ) ;
261
295
} ;
296
+ ( $code: ident, $stream: ident, $diagnostics: ident) => {
297
+ let source = $code. source( ) ;
298
+ let contents = source. contents( ) ;
299
+ let tokenizer = Tokenizer :: new( & $code. symbols, source, ContentReader :: new( & contents) ) ;
300
+ let $stream = TokenStream :: new( tokenizer, & mut $diagnostics) ;
301
+ } ;
262
302
}
263
303
264
304
#[ test]
@@ -388,4 +428,38 @@ mod tests {
388
428
assert ! ( stream. skip_until( |ref k| matches!( k, Plus ) ) . is_ok( ) ) ;
389
429
assert_eq ! ( stream. peek( ) . map( |t| t. kind) , Some ( Plus ) ) ;
390
430
}
431
+
432
#[test]
fn tokenize_simple_identifier_directive() {
    // A directive with a plain identifier and trailing text must
    // tokenize cleanly (stream built with NoDiagnostics).
    let directive = Code::new("`protect begin");
    new_stream!(directive, _stream);
}
437
+
438
#[test]
fn tokenize_extended_identifier_directive() {
    // An extended (backslash-delimited) identifier is also accepted
    // after the grave accent.
    let directive = Code::new("`\\extended ident\\ begin other words");
    new_stream!(directive, _stream);
}
443
+
444
#[test]
fn tokenize_directive_illegal_identifier() {
    // `123 is not a valid identifier after the grave accent: exactly one
    // diagnostic pointing at "123" is expected.
    let directive = Code::new("`123 begin other words");
    let mut diagnostics: Vec<Diagnostic> = Vec::new();
    new_stream!(directive, _stream, diagnostics);
    assert_eq!(
        diagnostics,
        vec![Diagnostic::error(directive.s1("123"), "Expecting identifier")]
    )
}
454
+
455
#[test]
fn tokenize_directive_then_end_of_stream() {
    // A lone grave accent at end of input must report a missing
    // identifier at the position of the grave accent itself.
    let directive = Code::new("`");
    let mut diagnostics: Vec<Diagnostic> = Vec::new();
    new_stream!(directive, _stream, diagnostics);
    assert_eq!(
        diagnostics,
        vec![Diagnostic::error(directive.s1("`"), "Expecting identifier")]
    )
}
391
465
}
0 commit comments