Skip to content

Commit 7df01d3

Browse files
committed
pymodule tokenize
1 parent 4728ff5 commit 7df01d3

File tree

1 file changed

+21
-25
lines changed

1 file changed

+21
-25
lines changed

vm/src/stdlib/tokenize.rs

Lines changed: 21 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,27 @@
11
/*
22
* python tokenize module.
33
*/
4-
5-
use std::iter::FromIterator;
6-
7-
use crate::obj::objstr::PyStringRef;
8-
use crate::pyobject::{BorrowValue, PyObjectRef, PyResult};
9-
use crate::vm::VirtualMachine;
10-
use rustpython_parser::lexer;
11-
12-
/// Tokenize the source string `s` and return a Python list whose
/// elements are the `Debug` representations of the produced tokens.
fn tokenize_tokenize(s: PyStringRef, vm: &VirtualMachine) -> PyResult {
    let source = s.borrow_value();

    // TODO: implement generator when the time has come.
    let mut tokens = Vec::new();
    for st in lexer::make_tokenizer(source) {
        // NOTE(review): unwrap panics on a lexer error — same behavior as before.
        tokens.push(vm.ctx.new_str(format!("{:?}", st.unwrap().1)));
    }
    Ok(vm.ctx.new_list(tokens))
}
4+
pub(crate) use decl::make_module;
225

236
// TODO: create main function when called with -m
24-
25-
pub fn make_module(vm: &VirtualMachine) -> PyObjectRef {
26-
let ctx = &vm.ctx;
27-
28-
py_module!(vm, "tokenize", {
29-
"tokenize" => ctx.new_function(tokenize_tokenize)
30-
})
7+
#[pymodule(name = "tokenize")]
8+
mod decl {
9+
use std::iter::FromIterator;
10+
11+
use crate::obj::objstr::PyStringRef;
12+
use crate::pyobject::{BorrowValue, PyResult};
13+
use crate::vm::VirtualMachine;
14+
use rustpython_parser::lexer;
15+
16+
#[pyfunction]
17+
fn tokenize(s: PyStringRef, vm: &VirtualMachine) -> PyResult {
18+
let source = s.borrow_value();
19+
20+
// TODO: implement generator when the time has come.
21+
let lexer1 = lexer::make_tokenizer(source);
22+
23+
let tokens = lexer1.map(|st| vm.ctx.new_str(format!("{:?}", st.unwrap().1)));
24+
let tokens = Vec::from_iter(tokens);
25+
Ok(vm.ctx.new_list(tokens))
26+
}
3127
}

0 commit comments

Comments (0)