@@ -1,31 +1,27 @@
 /*
  * python tokenize module.
  */
-
-use std::iter::FromIterator;
-
-use crate::obj::objstr::PyStringRef;
-use crate::pyobject::{BorrowValue, PyObjectRef, PyResult};
-use crate::vm::VirtualMachine;
-use rustpython_parser::lexer;
-
-fn tokenize_tokenize(s: PyStringRef, vm: &VirtualMachine) -> PyResult {
-    let source = s.borrow_value();
-
-    // TODO: implement generator when the time has come.
-    let lexer1 = lexer::make_tokenizer(source);
-
-    let tokens = lexer1.map(|st| vm.ctx.new_str(format!("{:?}", st.unwrap().1)));
-    let tokens = Vec::from_iter(tokens);
-    Ok(vm.ctx.new_list(tokens))
-}
+pub(crate) use decl::make_module;
 
 // TODO: create main function when called with -m
-
-pub fn make_module(vm: &VirtualMachine) -> PyObjectRef {
-    let ctx = &vm.ctx;
-
-    py_module!(vm, "tokenize", {
-        "tokenize" => ctx.new_function(tokenize_tokenize)
-    })
+#[pymodule(name = "tokenize")]
+mod decl {
+    use std::iter::FromIterator;
+
+    use crate::obj::objstr::PyStringRef;
+    use crate::pyobject::{BorrowValue, PyResult};
+    use crate::vm::VirtualMachine;
+    use rustpython_parser::lexer;
+
+    #[pyfunction]
+    fn tokenize(s: PyStringRef, vm: &VirtualMachine) -> PyResult {
+        let source = s.borrow_value();
+
+        // TODO: implement generator when the time has come.
+        let lexer1 = lexer::make_tokenizer(source);
+
+        let tokens = lexer1.map(|st| vm.ctx.new_str(format!("{:?}", st.unwrap().1)));
+        let tokens = Vec::from_iter(tokens);
+        Ok(vm.ctx.new_list(tokens))
+    }
 }
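The change ports the module from the handwritten `make_module`/`py_module!` pair to the `#[pymodule]`/`#[pyfunction]` attribute macros: the module body moves into `mod decl`, the function is exposed directly as `tokenize` instead of `tokenize_tokenize`, the crate re-exports the macro-generated `decl::make_module`, and the now-unused `PyObjectRef` import is dropped. For context on the `.unwrap().1` in the closure, below is a minimal standalone sketch of what the lexer iterator yields; it is not part of the commit, and it assumes `rustpython_parser` is available as a direct dependency (the token shapes printed in the comment are illustrative).

// Standalone sketch, assuming rustpython_parser as a dependency.
use rustpython_parser::lexer;

fn main() {
    // make_tokenizer yields Result-wrapped (start, token, end) triples;
    // the stdlib function above unwraps each one and keeps only the
    // middle element (.1), the token itself, Debug-formatted.
    for spanned in lexer::make_tokenizer("x = 1") {
        let (_start, tok, _end) = spanned.unwrap();
        println!("{:?}", tok); // e.g. Name { name: "x" }, Equal, Int { .. }
    }
}

Returning Debug-formatted strings is a stopgap, per the TODO kept in the diff: CPython's tokenize.tokenize yields structured TokenInfo tuples lazily, which is what the planned generator version would eventually mirror.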