@@ -4,20 +4,20 @@
 //! governing what is and is not a valid token are defined in the Python reference
 //! guide section on [Lexical analysis].
 //!
-//! The primary function in this module is [`make_tokenizer`], which takes a string slice
+//! The primary function in this module is [`lex`], which takes a string slice
 //! and returns an iterator over the tokens in the source code. The tokens are currently returned
 //! as a `Result<Spanned, LexicalError>`, where [`Spanned`] is a tuple containing the
 //! start and end [`Location`] and a [`Tok`] denoting the token.
 //!
 //! # Example
 //!
 //! ```
-//! use rustpython_parser::lexer::{make_tokenizer, Tok};
+//! use rustpython_parser::lexer::{lex, Tok};
 //! use rustpython_parser::mode::Mode;
 //! use rustpython_parser::token::StringKind;
 //!
 //! let source = "x = 'RustPython'";
-//! let tokens = make_tokenizer(source, Mode::Module)
+//! let tokens = lex(source, Mode::Module)
 //!     .map(|tok| tok.expect("Failed to lex"))
 //!     .collect::<Vec<_>>();
 //!
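The module docs above describe each yielded item as a `Result<Spanned, LexicalError>` with `Spanned = (Location, Tok, Location)`. A minimal sketch of consuming the renamed `lex` entry point and destructuring each spanned token, rather than unwrapping as the doctest does; it assumes `Location`, `Tok`, and `LexicalError` all derive `Debug`:

```rust
use rustpython_parser::lexer::lex;
use rustpython_parser::mode::Mode;

fn main() {
    // Each item is a Result<Spanned, LexicalError>; Spanned is (start, token, end).
    for result in lex("x = 'RustPython'", Mode::Module) {
        match result {
            Ok((start, tok, end)) => println!("{start:?}..{end:?}  {tok:?}"),
            Err(err) => eprintln!("lex error: {err:?}"),
        }
    }
}
```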
@@ -195,29 +195,29 @@ pub type Spanned = (Location, Tok, Location);
 /// The result of lexing a token.
 pub type LexResult = Result<Spanned, LexicalError>;
 
-/// Create a new tokenizer from a source string.
+/// Create a new lexer from a source string.
 ///
 /// # Examples
 ///
 /// ```
 /// use rustpython_parser::mode::Mode;
-/// use rustpython_parser::lexer::{make_tokenizer};
+/// use rustpython_parser::lexer::{lex};
 ///
 /// let source = "def hello(): return 'world'";
-/// let tokenizer = make_tokenizer(source, Mode::Module);
+/// let lexer = lex(source, Mode::Module);
 ///
-/// for token in tokenizer {
+/// for token in lexer {
 ///     println!("{:?}", token);
 /// }
 /// ```
 #[inline]
-pub fn make_tokenizer(source: &str, mode: Mode) -> impl Iterator<Item = LexResult> + '_ {
-    make_tokenizer_located(source, mode, Location::default())
+pub fn lex(source: &str, mode: Mode) -> impl Iterator<Item = LexResult> + '_ {
+    lex_located(source, mode, Location::default())
 }
 
-/// Create a new tokenizer from a source string, starting at a given location.
-/// You probably want to use [`make_tokenizer`] instead.
-pub fn make_tokenizer_located(
+/// Create a new lexer from a source string, starting at a given location.
+/// You probably want to use [`lex`] instead.
+pub fn lex_located(
     source: &str,
     mode: Mode,
     start_location: Location,
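`lex_located` is the variant for lexing a fragment whose spans should be reported relative to a position in a larger file. A hedged sketch of using it; the `Location::new(row, column)` constructor and its import path are assumptions not confirmed by this diff:

```rust
use rustpython_parser::lexer::lex_located;
use rustpython_parser::mode::Mode;
// Assumed import path for Location; the crate may re-export it elsewhere.
use rustpython_parser::ast::Location;

fn main() {
    // Pretend this snippet starts at row 10 of a host file, so every
    // reported span is offset accordingly instead of starting at the top.
    let start = Location::new(10, 0); // assumed constructor: (row, column)
    for token in lex_located("y = 1", Mode::Module, start) {
        println!("{token:?}");
    }
}
```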
@@ -230,7 +230,7 @@ where
     T: Iterator<Item = char>,
 {
     /// Create a new lexer from T and a starting location. You probably want to use
-    /// [`make_tokenizer`] instead.
+    /// [`lex`] instead.
     pub fn new(input: T, start: Location) -> Self {
         let mut lxr = Lexer {
             at_begin_of_line: true,
@@ -1320,7 +1320,7 @@ impl std::fmt::Display for LexicalErrorType {
 
 #[cfg(test)]
 mod tests {
-    use super::{make_tokenizer, StringKind, Tok};
+    use super::{lex, StringKind, Tok};
     use crate::mode::Mode;
     use num_bigint::BigInt;
 
@@ -1329,7 +1329,7 @@ mod tests {
     const UNIX_EOL: &str = "\n";
 
     pub fn lex_source(source: &str) -> Vec<Tok> {
-        let lexer = make_tokenizer(source, Mode::Module);
+        let lexer = lex(source, Mode::Module);
        lexer.map(|x| x.unwrap().1).collect()
     }
 
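The pattern in the `lex_source` test helper translates directly to downstream code. A sketch of the same helper outside the crate, assuming `Tok` derives `Debug` and exposes a struct-style `Name` variant:

```rust
use rustpython_parser::lexer::lex;
use rustpython_parser::mode::Mode;
use rustpython_parser::token::Tok;

/// Drop the spans and keep only the tokens, mirroring the test helper.
fn tokens_of(source: &str) -> Vec<Tok> {
    lex(source, Mode::Module)
        .map(|result| result.expect("failed to lex").1)
        .collect()
}

fn main() {
    let tokens = tokens_of("x = 'RustPython'");
    // `Tok::Name { .. }` as a struct variant is an assumption about the crate's API.
    assert!(matches!(tokens.first(), Some(Tok::Name { .. })));
    println!("{tokens:?}");
}
```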