Add unit tests

2024-04-05 08:07:10 +02:00
parent 16de0e0226
commit f6091f3d2a
5 changed files with 47 additions and 15 deletions

View File

@@ -1,4 +1,6 @@
 pub mod rlox_interpreter;
-pub mod scanner;
-pub mod token;
-pub mod token_type;
+mod scanner;
+mod token;
+mod token_type;

View File

@@ -1,8 +1,9 @@
 use std::{fs, io};
 use crate::{EX_DATAERR, EX_OK};
-use crate::scanner::Scanner;
 use std::io::Write;
+use crate::rlox::scanner::Scanner;
 pub struct RLoxInterpreter {
     pub had_error: bool,
 }
@@ -50,8 +51,18 @@ impl RLoxInterpreter {
     }
     fn run(&self, src: String) -> i32 {
-        let mut scanner = Scanner::create_scanner( src );
-        let rlox_interpreter = RLoxInterpreter { had_error: false };
-        if rlox_interpreter.had_error { EX_DATAERR } else { EX_OK }
+        let mut scanner = Scanner::new( src );
+        scanner.scan_token();
+        let mut current_line = 0;
+        for t in scanner.tokens {
+            if t.line!=current_line {
+                current_line = t.line;
+                println!("-- line {} --------------------", current_line);
+            }
+            println!("{}\t{}\t{}", t.token_type, t.lexeme, t.literal);
+        }
+        if self.had_error { EX_DATAERR } else { EX_OK }
     }
 }
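With run now driving the scanner through its public API and reporting through self.had_error, the interpreter becomes easier to exercise in isolation. Below is a minimal sketch of such a test, not part of this commit, assuming run stays private (so the test lives in a child mod tests inside the same file) and that EX_OK is the crate-level success code already imported at the top of the file:

#[cfg(test)]
mod tests {
    use super::RLoxInterpreter;
    use crate::EX_OK;

    #[test]
    fn run_without_error_returns_ex_ok() {
        // had_error stays false, so run should scan the source and fall through to EX_OK.
        let interpreter = RLoxInterpreter { had_error: false };
        assert_eq!(interpreter.run(String::from("(")), EX_OK);
    }
}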

View File

@@ -1,6 +1,6 @@
 use std::collections::HashMap;
-use crate::token::Token;
-use crate::token_type::TokenType;
+use crate::rlox::token::Token;
+use crate::rlox::token_type::TokenType;
 fn is_digit(c: char) -> bool {
     c >= '0' && c <= '9'
@@ -16,7 +16,7 @@ fn is_alpha_numeric(c: char) -> bool {
 pub struct Scanner {
     source: Vec<char>,
-    tokens: Vec<Token>,
+    pub tokens: Vec<Token>,
     start: usize,
     current: usize,
@@ -26,14 +26,14 @@ pub struct Scanner {
} }
impl Scanner { impl Scanner {
pub fn create_scanner( src: String ) -> Self { pub fn new(src: String) -> Self {
Self { Self {
source: src.chars().collect::<Vec<_>>(), source: src.chars().collect::<Vec<_>>(),
tokens: vec![], tokens: vec![],
start: 0, start: 0,
current: 0, current: 0,
line: 0, line: 0,
keywords: HashMap::new() keywords: HashMap::new(),
} }
} }
@@ -71,7 +71,7 @@ impl Scanner {
         self.current >= self.source.len()
     }
-    fn scan_token(&mut self) {
+    pub fn scan_token(&mut self) {
         let c = self.advance();
         match c {
             '(' => self.add_simple_token(TokenType::LeftParen),
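Making tokens public, exposing scan_token, and renaming create_scanner to new also open the scanner itself up to unit testing. A rough sketch, not part of this commit, assuming Token exposes a public token_type field (as the interpreter's println! suggests) and that scanning a lone "(" records exactly one LeftParen token:

#[cfg(test)]
mod tests {
    use crate::rlox::scanner::Scanner;
    use crate::rlox::token_type::TokenType;

    #[test]
    fn scans_a_left_paren() {
        let mut scanner = Scanner::new(String::from("("));
        scanner.scan_token();
        // The Eq/PartialEq derive added to TokenType is what makes these comparisons compile.
        assert_eq!(scanner.tokens.len(), 1);
        assert_eq!(scanner.tokens[0].token_type, TokenType::LeftParen);
    }
}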

View File

@@ -1,4 +1,4 @@
-use crate::token_type::TokenType;
+use crate::rlox::token_type::TokenType;
 #[derive(Debug)]
 pub struct Token {
@@ -13,3 +13,4 @@ impl std::fmt::Display for Token {
         write!(f, "{0} {1} {2}", self.token_type, self.lexeme, self.literal)
     }
 }

View File

@@ -1,6 +1,6 @@
-#[derive(Debug, Copy, Clone)]
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
 pub enum TokenType {
     // Single-character tokens.
     LeftParen,
@@ -104,3 +104,21 @@ impl std::fmt::Display for TokenType {
         }
     }
 }
+#[cfg(test)]
+mod tests {
+    use crate::rlox::token_type::TokenType;
+    #[test]
+    fn test_types() {
+        let t = TokenType::While;
+        assert_eq!(t, TokenType::While);
+        assert_eq!(format!("{}",t),String::from("WHILE"));
+        let t2 = TokenType::While;
+        let t3 = TokenType::Var;
+        assert_eq!(t, t2);
+        assert_ne!(t,t3);
+    }
+}