Add unit tests and reorganize files

2024-04-22 08:31:25 +02:00
parent f6091f3d2a
commit 5d5928d4ef
9 changed files with 219 additions and 66 deletions

View File

@@ -2,6 +2,12 @@
name = "rlox"
version = "0.1.0"
edition = "2021"
authors = ["Emmanuel BERNAT <manu@bernat.me>"]
license = "MIT"
description = "Lox language parser."
repository = "https://git.bernat.me/rlox"
documentation = ""
readme = ""
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -1,13 +1,17 @@
use std::env;
use std::io::Write;
//use std::io::Write;
use std::process;
mod rlox;
use crate::rlox::rlox_interpreter::RLoxInterpreter;
mod rlox_interpreter;
mod scanner;
mod token;
mod token_type;
use crate::rlox_interpreter::RLoxInterpreter;
// Exit codes from #include <sysexits.h>
const EX_OK: i32 = 0;
const EX_DATAERR: i32 = 65;
//const EX_DATAERR: i32 = 65;
const EX_USAGE : i32 = 66;
fn main() {
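The hunk ends at the fn main() signature; for context, here is a minimal sketch of a body consistent with the imports and exit codes above (run_prompt and the argument handling are assumptions, not shown in this diff):

fn main() {
    // Hypothetical sketch, not the committed body: dispatch on argument count
    // and exit with the sysexits-style codes defined above.
    let args: Vec<String> = env::args().collect();
    let interpreter = RLoxInterpreter::new();
    let code = match args.len() {
        1 => interpreter.run_prompt(),       // assumed REPL entry point
        2 => interpreter.run_file(&args[1]), // run_file is shown later in this diff
        _ => {
            println!("Usage: rlox [script]");
            EX_USAGE
        }
    };
    process::exit(code);
}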

View File

@@ -1,6 +0,0 @@
pub mod rlox_interpreter;
mod scanner;
mod token;
mod token_type;

View File

@@ -1,16 +0,0 @@
use crate::rlox::token_type::TokenType;
#[derive(Debug)]
pub struct Token {
pub token_type: TokenType,
pub lexeme: String,
pub literal: String,
pub line: u32,
}
impl std::fmt::Display for Token {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{0} {1} {2}", self.token_type, self.lexeme, self.literal)
}
}

View File

@@ -1,26 +1,14 @@
use std::{fs, io};
use crate::{EX_DATAERR, EX_OK};
use crate::EX_OK;
use std::io::Write;
use crate::rlox::scanner::Scanner;
use crate::scanner::Scanner;
pub struct RLoxInterpreter {
pub had_error: bool,
}
pub struct RLoxInterpreter;
impl RLoxInterpreter {
pub fn new() -> Self {
RLoxInterpreter {
had_error: false,
}
}
fn error(&self, line: u32, message: String) {
self.report(line, String::from(""), message);
}
fn report(&self, line: u32, place: String, message: String) {
println!("[line {line}] Error {place}: {message}");
RLoxInterpreter
}
pub fn run_file(&self, file_path: &str ) -> i32 {
@@ -53,7 +41,7 @@ impl RLoxInterpreter {
fn run(&self, src: String) -> i32 {
let mut scanner = Scanner::new( src );
scanner.scan_token();
scanner.scan_tokens();
let mut current_line = 0;
for t in scanner.tokens {
if t.line!=current_line {
@@ -63,6 +51,6 @@ impl RLoxInterpreter {
println!("{}\t{}\t{}", t.token_type, t.lexeme, t.literal);
}
if self.had_error { EX_DATAERR } else { EX_OK }
EX_OK
}
}
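With error tracking moved into Scanner (next file), run() no longer has a had_error flag to consult and now returns EX_OK unconditionally. If scan errors should still surface in the process exit code, a small follow-up along these lines would restore that behaviour (hypothetical; it assumes Scanner's had_error is exposed, e.g. made pub, and that EX_DATAERR is re-imported):

fn run(&self, src: String) -> i32 {
    let mut scanner = Scanner::new(src);
    scanner.scan_tokens();
    // ... print the token table as above ...
    if scanner.had_error { EX_DATAERR } else { EX_OK }
}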

View File

@@ -1,6 +1,6 @@
use std::collections::HashMap;
use crate::rlox::token::Token;
use crate::rlox::token_type::TokenType;
use crate::token::Token;
use crate::token_type::TokenType;
fn is_digit(c: char) -> bool {
c >= '0' && c <= '9'
@@ -23,18 +23,27 @@ pub struct Scanner {
line: u32,
keywords: HashMap<String, TokenType>,
had_error: bool,
}
impl Scanner {
pub fn new(src: String) -> Self {
Self {
let mut scanner = Self {
source: src.chars().collect::<Vec<_>>(),
tokens: vec![],
start: 0,
current: 0,
line: 0,
keywords: HashMap::new(),
}
had_error: false
};
scanner.init_keywords();
scanner
}
fn error(&mut self, line: u32, message: String) {
self.had_error = true;
println!("[line {line}] Error : {message}");
}
fn init_keywords(&mut self) {
@@ -57,7 +66,7 @@ impl Scanner {
self.keywords.insert(String::from("while"), TokenType::While);
}
fn scan_tokens(&mut self) {
pub fn scan_tokens(&mut self) {
while !self.is_at_end() {
self.start = self.current;
self.scan_token();
@@ -68,7 +77,8 @@ impl Scanner {
}
fn is_at_end(&self) -> bool {
self.current >= self.source.len()
let at_end = self.current >= self.source.len();
at_end
}
pub fn scan_token(&mut self) {
@@ -109,7 +119,7 @@ impl Scanner {
} else if is_alpha(c) {
self.identifier();
} else {
// Error: unknown lexeme
self.error(self.line, String::from("Unexpected token"));
}
}
}
@@ -117,7 +127,7 @@ impl Scanner {
fn advance(&mut self) -> char {
self.current += 1;
self.source[self.current]
self.source[self.current-1]
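// After the increment, the character just consumed is at current-1; the old
// index read one character ahead and could run past the end of the source.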
}
fn match_next(&mut self, expected: char) -> bool {
@@ -198,3 +208,147 @@ impl Scanner {
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_digit_and_alpha() {
assert!(is_digit('0'));
assert_eq!(is_digit('a'), false);
assert!(is_alpha('a'));
assert_eq!(is_alpha('🤣'), false);
assert!(is_alpha_numeric('0'));
assert!(is_alpha_numeric('a'));
assert_eq!(is_alpha_numeric('🤣'), false);
}
#[test]
fn test_static_keywords() {
let s = Scanner::new(String::from(""));
assert_eq!(s.keywords.len(), 16);
}
#[test]
fn test_is_at_end() {
let s = Scanner::new(String::from(""));
assert!(s.is_at_end());
let mut s2 = Scanner::new(String::from("1+2"));
assert_eq!(s2.is_at_end(), false);
s2.current = 3;
assert!(s2.is_at_end());
}
#[test]
fn test_advance() {
let mut s = Scanner::new(String::from("1+2"));
assert_eq!(s.current, 0);
let c = s.advance();
assert_eq!(c, '1');
assert_eq!(s.current, 1);
let c2 = s.advance();
assert_eq!(c2, '+');
assert_eq!(s.current, 2);
}
#[test]
fn test_match_next() {
let mut s = Scanner::new(String::from("1+2"));
let is_not_a_match = s.match_next('6');
assert_eq!(is_not_a_match, false);
assert_eq!(s.current, 0);
let is_a_match = s.match_next('1');
assert_eq!(is_a_match, true);
assert_eq!(s.current, 1);
}
#[test]
fn test_peek() {
let mut s = Scanner::new(String::from("1+2"));
assert_eq!(s.peek(), '1');
s.advance();
s.advance();
s.advance();
assert!(s.is_at_end());
assert_eq!(s.peek(), '\0');
}
#[test]
fn test_peek_next() {
let s = Scanner::new(String::from("1+2"));
assert_eq!(s.peek_next(), '+');
}
#[test]
fn test_string() {
let mut s = Scanner::new(String::from("\"hello\""));
s.advance();
s.string();
assert!(s.is_at_end());
}
#[test]
fn test_number() {
let mut s = Scanner::new(String::from("12345"));
s.number();
assert!(s.is_at_end());
}
#[test]
fn test_identifier() {
let mut s = Scanner::new(String::from("+id"));
s.identifier();
assert!(!s.is_at_end());
s.advance();
s.identifier();
assert!(s.is_at_end());
}
#[test]
fn test_add_simple_token() {
let mut s = Scanner::new(String::from(""));
s.add_simple_token(TokenType::LeftParen);
assert_eq!(s.tokens.len(), 1);
}
#[test]
fn test_add_token() {
let mut s = Scanner::new(String::from(""));
s.add_token(TokenType::Identifier, String::from("id"));
assert_eq!(s.tokens.len(), 1);
}
#[test]
fn test_scan_token() {
let mut s = Scanner::new(String::from("+"));
s.scan_token();
assert_eq!(s.tokens.len(), 1);
assert_eq!(s.tokens[0].token_type, TokenType::Plus);
}
#[test]
fn test_scan_tokens() {
let mut s = Scanner::new(String::from("1+id-\"toto\""));
s.scan_tokens();
assert_eq!(s.tokens.len(), 6);
assert_eq!(s.tokens[0].token_type, TokenType::Number);
assert_eq!(s.tokens[1].token_type, TokenType::Plus);
assert_eq!(s.tokens[2].token_type, TokenType::Identifier);
assert_eq!(s.tokens[3].token_type, TokenType::Minus);
assert_eq!(s.tokens[4].token_type, TokenType::String);
assert_eq!(s.tokens[5].token_type, TokenType::Eof);
assert_eq!(s.tokens[0].literal, String::from("1"));
assert_eq!(s.tokens[2].literal, String::from("id"));
assert_eq!(s.tokens[4].literal, String::from("toto"));
}
}
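These tests cover the happy path; the error branch added above (self.error on an unexpected character) is not exercised. A hypothetical extra case, sketched for the same module (the test name and the "@" input are assumptions, not part of this commit):

#[test]
fn test_unexpected_character_sets_had_error() {
    // "@" is not a Lox token, so scan_token should report it and flag the error.
    let mut s = Scanner::new(String::from("@"));
    s.scan_token();
    assert!(s.had_error);
}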

src/token.rs (new file, 33 lines added)
View File

@@ -0,0 +1,33 @@
use crate::token_type::TokenType;
#[derive(Debug)]
pub struct Token {
pub token_type: TokenType,
pub lexeme: String,
pub literal: String,
pub line: u32,
}
impl std::fmt::Display for Token {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{0} {1} {2}", self.token_type, self.lexeme, self.literal)
}
}
#[cfg(test)]
mod tests {
use crate::token_type::TokenType;
use crate::token::Token;
#[test]
fn test_token() {
let t = Token {
token_type: TokenType::True,
lexeme: String::from("true"),
literal: String::from("true"),
line: 1,
};
assert_eq!(format!("{}",t), String::from("TRUE true true"));
}
}

View File

@@ -1,5 +1,3 @@
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum TokenType {
// Single-character tokens.
@@ -107,18 +105,18 @@ impl std::fmt::Display for TokenType {
#[cfg(test)]
mod tests {
use crate::rlox::token_type::TokenType;
use super::*;
#[test]
fn test_types() {
let t = TokenType::While;
assert_eq!(t, TokenType::While);
assert_eq!(format!("{}",t),String::from("WHILE"));
assert_eq!(format!("{}", t), String::from("WHILE"));
let t2 = TokenType::While;
let t3 = TokenType::Var;
assert_eq!(t, t2);
assert_ne!(t,t3);
assert_ne!(t, t3);
}
}
}

View File

@@ -1,8 +0,0 @@
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
let result = 2 + 2;
assert_eq!(result, 4);
}
}