@@ -1,19 +1,28 @@
 use std::{
     fmt::Write,
-    path::{Component, PathBuf},
+    path::{Component, Path, PathBuf},
 };

 use test_utils::{collect_tests, dir_tests, project_dir, read_text};

-use crate::{fuzz, SourceFile};
+use crate::{fuzz, tokenize, Location, SourceFile, SyntaxError, TextRange, Token};

 #[test]
 fn lexer_tests() {
-    dir_tests(&test_data_dir(), &["lexer"], |text, _| {
-        // FIXME: add tests for errors (their format is up to discussion)
-        let (tokens, _errors) = crate::tokenize(text);
-        dump_tokens(&tokens, text)
-    })
+    // FIXME:
+    // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals
+    // * Add tests for unescape errors
+
+    dir_tests(&test_data_dir(), &["lexer/ok"], |text, path| {
+        let (tokens, errors) = tokenize(text);
+        assert_errors_are_absent(&errors, path);
+        dump_tokens_and_errors(&tokens, &errors, text)
+    });
+    dir_tests(&test_data_dir(), &["lexer/err"], |text, path| {
+        let (tokens, errors) = tokenize(text);
+        assert_errors_are_present(&errors, path);
+        dump_tokens_and_errors(&tokens, &errors, text)
+    });
 }

 #[test]
@@ -33,18 +42,13 @@ fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_eq!(
-            errors,
-            &[] as &[crate::SyntaxError],
-            "There should be no errors in the file {:?}",
-            path.display(),
-        );
+        assert_errors_are_absent(&errors, path);
         parse.debug_dump()
     });
     dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
+        assert_errors_are_present(&errors, path);
         parse.debug_dump()
     });
 }
@@ -76,7 +80,7 @@ fn self_hosting_parsing() {
         .into_iter()
         .filter_entry(|entry| {
             !entry.path().components().any(|component| {
-                // Get all files which are not in the crates/ra_syntax/tests/data folder
+                // Get all files which are not in the crates/ra_syntax/test_data folder
                 component == Component::Normal(OsStr::new("test_data"))
             })
         })
@@ -102,15 +106,47 @@ fn test_data_dir() -> PathBuf {
     project_dir().join("crates/ra_syntax/test_data")
 }

-fn dump_tokens(tokens: &[crate::Token], text: &str) -> String {
+fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) {
+    assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
+}
+fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
+    assert_eq!(
+        errors,
+        &[] as &[SyntaxError],
+        "There should be no errors in the file {:?}",
+        path.display(),
+    );
+}
+
+fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
     let mut acc = String::new();
     let mut offset = 0;
     for token in tokens {
-        let len: u32 = token.len.into();
-        let len = len as usize;
-        let token_text = &text[offset..offset + len];
-        offset += len;
-        write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
+        let token_len = token.len.to_usize();
+        let token_text = &text[offset..offset + token_len];
+        offset += token_len;
+        writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap();
+    }
+    for err in errors {
+        let err_range = location_to_range(err.location());
+        writeln!(
+            acc,
+            "> error{:?} token({:?}) msg({})",
+            err.location(),
+            &text[err_range],
+            err.kind()
+        )
+        .unwrap();
+    }
+    return acc;
+
+    // FIXME: copy-pasted this from `ra_ide/src/diagnostics.rs`
+    // `Location` will be refactored soon in new PR, see todos here:
+    // https://github.com/rust-analyzer/rust-analyzer/issues/223
+    fn location_to_range(location: Location) -> TextRange {
+        match location {
+            Location::Offset(offset) => TextRange::offset_len(offset, 1.into()),
+            Location::Range(range) => range,
+        }
     }
-    acc
 }