Merge pull request #3 from funs-lang/lex-float
lex: add `float` support
FedericoBruzzone authored Aug 12, 2024
2 parents 7f68a7f + c8a600e commit 610b700
Showing 12 changed files with 272 additions and 14 deletions.
1 change: 1 addition & 0 deletions examples/first.fs
@@ -4,3 +4,4 @@ _x_int: int = 0
 
 
 _x_int: int = 0
+_x_float: float = 0.1
4 changes: 2 additions & 2 deletions src/lexer/mod.rs
@@ -78,7 +78,7 @@ impl Iterator for Lexer {
     }
 }
 
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, PartialEq)]
 pub enum LexerError {
     UnexpectedToken(Token),
 }
@@ -133,7 +133,7 @@ mod tests {
     #[test]
     fn identifier() {
         let fs_files = collect_fs_files("./testdata/identifier", true);
-        assert_eq!(fs_files.len(), 12);
+        assert_eq!(fs_files.len(), 15);
 
         for path in fs_files {
             info!("file -> {:?}", path);
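For context on the updated count: this PR adds three `.fs` fixtures under `testdata/identifier` (`id_float_assign.fs`, `id_float_assign_2.fs`, and `id_int_assign_2.fs`), taking the directory from 12 to 15 files. A minimal sketch of what a `collect_fs_files`-style helper could look like; the repo's real helper takes an extra flag and may recurse or filter differently, so treat the name, signature, and sorting here as assumptions:

```rust
// Hypothetical stand-in for the repo's collect_fs_files helper:
// gather every .fs file in a directory, sorted for stable test order.
use std::fs;
use std::path::{Path, PathBuf};

fn collect_fs_files_sketch(dir: &Path) -> Vec<PathBuf> {
    let mut files: Vec<PathBuf> = fs::read_dir(dir)
        .expect("fixture directory should exist")
        .filter_map(Result::ok)
        .map(|entry| entry.path())
        .filter(|path| path.extension().map_or(false, |ext| ext == "fs"))
        .collect();
    files.sort();
    files
}

fn main() {
    let files = collect_fs_files_sketch(Path::new("./testdata/identifier"));
    // 12 fixtures before this PR, plus the three added here.
    assert_eq!(files.len(), 15);
}
```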
23 changes: 17 additions & 6 deletions src/lexer/states.rs
@@ -73,7 +73,7 @@ impl State for StateStart {
                 TransitionKind::Consume,
             )),
             Some(c) if c.is_ascii_digit() => Ok(Lexer::proceed(
-                Box::new(StateNumber),
+                Box::new(StateNumber { is_float: false }),
                 TransitionKind::AdvanceOffset,
             )),
             Some(c) if c.is_alphabetic() || c.eq(&'_') => Ok(Lexer::proceed(
@@ -108,24 +108,35 @@ impl State for StateComment {
 }
 
 #[derive(Debug)]
-pub struct StateNumber;
+pub struct StateNumber {
+    is_float: bool,
+}
 
 impl State for StateNumber {
     fn visit(&self, cursor: &mut Cursor) -> Result<Transition, LexerError> {
         match cursor.peek() {
             Some(c) if c.is_ascii_digit() => Ok(Lexer::proceed(
-                Box::new(StateNumber),
+                Box::new(StateNumber {
+                    is_float: self.is_float,
+                }),
                 TransitionKind::AdvanceOffset,
             )),
+            Some(c) if c.eq(&'.') => Ok(Lexer::proceed(
+                Box::new(StateNumber { is_float: true }),
+                TransitionKind::AdvanceOffset,
+            )),
             _ => {
                 let lexeme = cursor.source().content()[cursor.index()..cursor.offset()].to_string();
                 let location = cursor.location().clone();
+                let token_kind = if self.is_float {
+                    TokenKind::TokenLiteral(Literal::Float(lexeme.parse().unwrap()))
+                } else {
+                    TokenKind::TokenLiteral(Literal::Int(lexeme.parse().unwrap()))
+                };
                 Ok(Transition {
                     state: Box::new(StateStart),
                     transition_kind: TransitionKind::EmitToken(Token::new(
-                        TokenKind::TokenLiteral(Literal::Int(lexeme.parse().unwrap())),
-                        lexeme,
-                        location,
+                        token_kind, lexeme, location,
                     )),
                 })
             }
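This hunk is the core of the change: `StateNumber` now threads an `is_float` flag through its self-transition, flips it to `true` when a `.` is consumed, and picks `Literal::Float` or `Literal::Int` when the number ends. A self-contained sketch of the same scanning logic, stripped of the repo's `Cursor`/`Transition` machinery — `Lit` and `scan_number` are illustrative, and unlike the state above this version stops at a second `.` instead of deferring the failure to `parse().unwrap()`:

```rust
// Flattened sketch of StateNumber's logic: consume digits, flip a
// flag on the first '.', then parse the lexeme as i64 or f64.
#[derive(Debug, PartialEq)]
enum Lit {
    Int(i64),
    Float(f64),
}

fn scan_number(src: &str) -> (Lit, &str) {
    let mut is_float = false;
    let mut end = 0;
    for (i, c) in src.char_indices() {
        match c {
            '0'..='9' => end = i + 1,
            '.' if !is_float => {
                is_float = true;
                end = i + 1;
            }
            _ => break, // any other char ends the number, like the `_` arm above
        }
    }
    let (lexeme, rest) = src.split_at(end);
    let lit = if is_float {
        Lit::Float(lexeme.parse().unwrap()) // mirrors the Float branch above
    } else {
        Lit::Int(lexeme.parse().unwrap()) // mirrors the Int branch above
    };
    (lit, rest)
}

fn main() {
    assert_eq!(scan_number("0.1\n").0, Lit::Float(0.1));
    assert_eq!(scan_number("732 ").0, Lit::Int(732));
    assert_eq!(scan_number("73.20").1, "");
}
```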
10 changes: 7 additions & 3 deletions src/lexer/token.rs
@@ -3,15 +3,17 @@ use serde::{Deserialize, Serialize};
 use std::path::{Path, PathBuf};
 
 const KEYWORD_INT: &str = "int";
+const KEYWORD_FLOAT: &str = "float";
 const SEPARATOR_COLON: &str = ":";
 const SEPARATOR_ASSIGN: &str = "=";
 
-#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
 pub enum Literal {
     Int(i64),
+    Float(f64),
 }
 
-#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, Deserialize, Serialize)]
 pub enum TokenKind {
     TokenLiteral(Literal),
     TokenIdentifier,
@@ -26,6 +28,7 @@ impl TokenKind {
     fn match_keyword(lexeme: &str) -> Option<TokenKind> {
         match lexeme {
             KEYWORD_INT => Some(TokenKind::TokenKeyword),
+            KEYWORD_FLOAT => Some(TokenKind::TokenKeyword),
             _ => None,
         }
     }
@@ -159,7 +162,7 @@ impl From<&PathBuf> for TokenLocation {
     }
 }
 
-#[derive(Clone, Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize)]
+#[derive(Clone, Debug, PartialEq, serde::Deserialize, serde::Serialize)]
 pub struct Token {
     /// The kind of the token
     kind: TokenKind,
@@ -199,6 +202,7 @@ impl std::fmt::Display for Literal {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         match self {
             Literal::Int(value) => write!(f, "Int({})", value),
+            Literal::Float(value) => write!(f, "Float({})", value),
         }
     }
 }
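A note on the derive changes running through this diff: dropping `Eq` (here and on `LexerError`, `TokenKind`, and `Token`) is forced by the new `Float(f64)` variant, since `f64` implements only `PartialEq` — `NaN != NaN` breaks the reflexivity that `Eq` promises. A short illustration:

```rust
// f64 cannot be Eq because float equality is not reflexive.
#[derive(Debug, PartialEq)] // adding Eq here fails: `f64: Eq` is not satisfied
struct FloatLit(f64);

fn main() {
    assert_ne!(f64::NAN, f64::NAN); // NaN differs from itself
    assert!(FloatLit(1.5) == FloatLit(1.5)); // PartialEq still works fine
}
```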
1 change: 1 addition & 0 deletions testdata/identifier/id_float_assign.fs
@@ -0,0 +1 @@
_x_float: float = 0.0
76 changes: 76 additions & 0 deletions testdata/identifier/id_float_assign.tokens
@@ -0,0 +1,76 @@
[
  {
    "kind": "TokenIdentifier",
    "lexeme": "_x_float",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 0,
      "column_end": 8
    }
  },
  {
    "kind": "TokenColon",
    "lexeme": ":",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 8,
      "column_end": 9
    }
  },
  {
    "kind": "TokenKeyword",
    "lexeme": "float",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 10,
      "column_end": 15
    }
  },
  {
    "kind": "TokenAssign",
    "lexeme": "=",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 16,
      "column_end": 17
    }
  },
  {
    "kind": {
      "TokenLiteral": {
        "Float": 0.0
      }
    },
    "lexeme": "0.0",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 18,
      "column_end": 21
    }
  },
  {
    "kind": "TokenNewLine",
    "lexeme": "\\n",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 21,
      "column_end": 21
    }
  },
  {
    "kind": "TokenEOF",
    "lexeme": "",
    "location": {
      "file_path": "",
      "line": 1,
      "column_start": 0,
      "column_end": 0
    }
  }
]
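Because `Token` derives `Serialize`/`Deserialize`, golden files like the one above map one-to-one onto the Rust types: unit variants such as `TokenKeyword` serialize as bare strings, while the newtype chain `TokenKind::TokenLiteral(Literal::Float(..))` produces the nested `{"TokenLiteral": {"Float": ...}}` object. A self-contained sketch that round-trips the float entry — the mirror types below are illustrative subsets of the repo's `token.rs`, and assume `serde`/`serde_json` as dependencies:

```rust
// Illustrative mirror of the .tokens schema; the real definitions
// live in src/lexer/token.rs. serde's default externally-tagged
// enum encoding yields exactly the JSON shapes seen above.
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Deserialize, Serialize)]
enum Literal {
    Int(i64),
    Float(f64),
}

#[derive(Debug, PartialEq, Deserialize, Serialize)]
enum TokenKind {
    TokenLiteral(Literal), // nested object: {"TokenLiteral": {"Float": 0.0}}
    TokenKeyword,          // bare string: "TokenKeyword"
}

#[derive(Debug, PartialEq, Deserialize, Serialize)]
struct TokenLocation {
    file_path: String,
    line: usize,
    column_start: usize,
    column_end: usize,
}

#[derive(Debug, PartialEq, Deserialize, Serialize)]
struct Token {
    kind: TokenKind,
    lexeme: String,
    location: TokenLocation,
}

fn main() {
    let json = r#"{
        "kind": { "TokenLiteral": { "Float": 0.0 } },
        "lexeme": "0.0",
        "location": { "file_path": "", "line": 0, "column_start": 18, "column_end": 21 }
    }"#;
    let token: Token = serde_json::from_str(json).unwrap();
    assert_eq!(token.kind, TokenKind::TokenLiteral(Literal::Float(0.0)));
    // Re-serializing reproduces the golden-file entry.
    println!("{}", serde_json::to_string_pretty(&token).unwrap());
}
```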
1 change: 1 addition & 0 deletions testdata/identifier/id_float_assign_2.fs
@@ -0,0 +1 @@
_x_float: float = 73.20
76 changes: 76 additions & 0 deletions testdata/identifier/id_float_assign_2.tokens
@@ -0,0 +1,76 @@
[
  {
    "kind": "TokenIdentifier",
    "lexeme": "_x_float",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 0,
      "column_end": 8
    }
  },
  {
    "kind": "TokenColon",
    "lexeme": ":",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 8,
      "column_end": 9
    }
  },
  {
    "kind": "TokenKeyword",
    "lexeme": "float",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 10,
      "column_end": 15
    }
  },
  {
    "kind": "TokenAssign",
    "lexeme": "=",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 16,
      "column_end": 17
    }
  },
  {
    "kind": {
      "TokenLiteral": {
        "Float": 73.20
      }
    },
    "lexeme": "73.20",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 18,
      "column_end": 23
    }
  },
  {
    "kind": "TokenNewLine",
    "lexeme": "\\n",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 23,
      "column_end": 23
    }
  },
  {
    "kind": "TokenEOF",
    "lexeme": "",
    "location": {
      "file_path": "",
      "line": 1,
      "column_start": 0,
      "column_end": 0
    }
  }
]
2 changes: 1 addition & 1 deletion testdata/identifier/id_int_assign.fs
@@ -1 +1 @@
-_x_int: int = 0
\ No newline at end of file
+_x_int: int = 0
(The only change here is a trailing newline added to the file, which is why the deleted and added lines look identical; the matching .tokens update below now expects a TokenNewLine before TokenEOF.)
14 changes: 12 additions & 2 deletions testdata/identifier/id_int_assign.tokens
@@ -54,14 +54,24 @@
       }
     },
     {
-      "kind": "TokenEOF",
-      "lexeme": "",
+      "kind": "TokenNewLine",
+      "lexeme": "\\n",
       "location": {
         "file_path": "",
         "line": 0,
         "column_start": 15,
         "column_end": 15
       }
+    },
+    {
+      "kind": "TokenEOF",
+      "lexeme": "",
+      "location": {
+        "file_path": "",
+        "line": 1,
+        "column_start": 0,
+        "column_end": 0
+      }
     }
   ]
 
1 change: 1 addition & 0 deletions testdata/identifier/id_int_assign_2.fs
@@ -0,0 +1 @@
_x_int: int = 732
77 changes: 77 additions & 0 deletions testdata/identifier/id_int_assign_2.tokens
@@ -0,0 +1,77 @@
[
  {
    "kind": "TokenIdentifier",
    "lexeme": "_x_int",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 0,
      "column_end": 6
    }
  },
  {
    "kind": "TokenColon",
    "lexeme": ":",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 6,
      "column_end": 7
    }
  },
  {
    "kind": "TokenKeyword",
    "lexeme": "int",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 8,
      "column_end": 11
    }
  },
  {
    "kind": "TokenAssign",
    "lexeme": "=",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 12,
      "column_end": 13
    }
  },
  {
    "kind": {
      "TokenLiteral": {
        "Int": 732
      }
    },
    "lexeme": "732",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 14,
      "column_end": 17
    }
  },
  {
    "kind": "TokenNewLine",
    "lexeme": "\\n",
    "location": {
      "file_path": "",
      "line": 0,
      "column_start": 17,
      "column_end": 17
    }
  },
  {
    "kind": "TokenEOF",
    "lexeme": "",
    "location": {
      "file_path": "",
      "line": 1,
      "column_start": 0,
      "column_end": 0
    }
  }
]
