Recognition of reserved words and identifiers

2024-04-03 08:53:40 +02:00
parent 3fd9f2bd3a
commit 35c9ce4b5d
2 changed files with 55 additions and 5 deletions

Changed file 1 of 2:

@@ -1,5 +1,22 @@
 use super::token_type::TokenType;
 use super::token::Token;
+use std::collections::HashMap;
+
+//scores.insert(String::from("Blue"), 10);
+fn is_digit( c: char ) -> bool {
+    c>='0' && c<='9'
+}
+
+fn is_alpha( c: char ) -> bool {
+    (c>='a' && c<='z') || (c>='A' && c<='Z') || c=='_'
+}
+
+fn is_alpha_numeric ( c: char ) -> bool {
+    is_digit(c) || is_alpha(c)
+}
+
 struct Scanner {
     source: Vec<char>,
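The hunk above introduces free functions that classify characters by hand, replacing the old `is_digit` that lived between the struct and its `impl`. For reference, here is a standalone sketch (not part of the commit) showing that the same classification can be expressed with the standard `char` helpers `is_ascii_digit` and `is_ascii_alphabetic`, keeping `_` as a valid identifier character:

    // Standalone sketch: the hand-rolled range checks versus the equivalent
    // `char` methods from the standard library.
    fn is_digit(c: char) -> bool {
        c.is_ascii_digit() // same as c >= '0' && c <= '9'
    }

    fn is_alpha(c: char) -> bool {
        c.is_ascii_alphabetic() || c == '_' // same as the a-z / A-Z / '_' ranges
    }

    fn is_alpha_numeric(c: char) -> bool {
        is_digit(c) || is_alpha(c)
    }

    fn main() {
        assert!(is_digit('7'));
        assert!(is_alpha('_'));
        assert!(is_alpha_numeric('x'));
        assert!(!is_alpha_numeric('+'));
        println!("character classification helpers behave as expected");
    }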
@@ -8,13 +25,31 @@ struct Scanner {
     start: usize,
     current: usize,
     line: u32,
+    keywords: HashMap<String, TokenType>,
 }
 
-fn is_digit( c: char ) -> bool {
-    c>='0' && c<='9'
-}
-
 impl Scanner {
+    fn init_keywords(&mut self) {
+        self.keywords = HashMap::new();
+        self.keywords.insert( String::from("and"), TokenType::And );
+        self.keywords.insert( String::from("class"), TokenType::Class );
+        self.keywords.insert( String::from("else"), TokenType::Else );
+        self.keywords.insert( String::from("false"), TokenType::False );
+        self.keywords.insert( String::from("for"), TokenType::For );
+        self.keywords.insert( String::from("fun"), TokenType::Fun );
+        self.keywords.insert( String::from("if"), TokenType::If );
+        self.keywords.insert( String::from("nil"), TokenType::Nil );
+        self.keywords.insert( String::from("or"), TokenType::Or );
+        self.keywords.insert( String::from("print"), TokenType::Print );
+        self.keywords.insert( String::from("return"), TokenType::Return );
+        self.keywords.insert( String::from("super"), TokenType::Super );
+        self.keywords.insert( String::from("this"), TokenType::This );
+        self.keywords.insert( String::from("true"), TokenType::True );
+        self.keywords.insert( String::from("var"), TokenType::Var );
+        self.keywords.insert( String::from("while"), TokenType::While );
+    }
+
     fn scan_tokens(&mut self) {
         while !self.is_at_end() {
             self.start = self.current;
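This hunk stores a `keywords: HashMap<String, TokenType>` on the scanner and fills it in `init_keywords`, so reserved words can later be distinguished from ordinary identifiers by a simple lookup. Below is a standalone sketch of the same lookup pattern, with a trimmed-down stand-in for `TokenType` and an array-based construction that would be a more compact alternative to the repeated `insert` calls:

    use std::collections::HashMap;

    // Stand-in for the real TokenType enum; only a few variants for the demo.
    #[derive(Debug, Copy, Clone, PartialEq)]
    enum TokenType {
        And,
        Class,
        If,
        While,
        Identifier,
    }

    // Build the keyword table from an array instead of repeated insert() calls.
    fn init_keywords() -> HashMap<String, TokenType> {
        [
            ("and", TokenType::And),
            ("class", TokenType::Class),
            ("if", TokenType::If),
            ("while", TokenType::While),
        ]
        .into_iter()
        .map(|(k, v)| (k.to_string(), v))
        .collect()
    }

    fn main() {
        let keywords = init_keywords();
        // A hit yields the keyword's token type; a miss means an ordinary identifier.
        assert_eq!(keywords.get("while"), Some(&TokenType::While));
        assert_eq!(keywords.get("foo"), None);
        println!("keyword lookup works");
    }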
@@ -62,6 +97,8 @@ impl Scanner {
             _ => {
                 if is_digit(c) {
                     self.number();
+                } else if is_alpha(c) {
+                    self.identifier();
                 } else {
                     // Error: unknown lexeme
                 }
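This hunk extends the default arm of the scanner's dispatch on a lexeme's first character: a digit starts a number, a letter or `_` starts an identifier or keyword, and anything else is an error. A tiny standalone sketch of that three-way decision (the `lexeme_kind` helper is purely illustrative):

    // Hypothetical helper illustrating the scanner's first-character dispatch.
    fn lexeme_kind(c: char) -> &'static str {
        if c.is_ascii_digit() {
            "number"
        } else if c.is_ascii_alphabetic() || c == '_' {
            "identifier or keyword"
        } else {
            "error: unknown lexeme"
        }
    }

    fn main() {
        assert_eq!(lexeme_kind('4'), "number");
        assert_eq!(lexeme_kind('w'), "identifier or keyword");
        assert_eq!(lexeme_kind('#'), "error: unknown lexeme");
    }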
@@ -139,4 +176,16 @@ impl Scanner {
         self.add_token( TokenType::Number, self.source[self.start..self.current].into_iter().collect() ); // The string will still need to be parsed to get the actual numeric value
     }
+
+    fn identifier(&mut self) {
+        while is_alpha_numeric(self.peek()) {
+            self.advance();
+        }
+        let text: String = self.source[self.start..self.current].into_iter().collect();
+        match self.keywords.get(&text) {
+            Some( t ) => { self.add_simple_token( *t ) },
+            None => { self.add_token( TokenType::Identifier, text ) }
+        }
+    }
 }
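The new `identifier()` method applies maximal munch: it consumes characters while they are alphanumeric, then looks the collected text up in the keyword table; a hit emits the keyword's token type via `add_simple_token(*t)`, a miss emits a plain `Identifier`. Below is a standalone sketch of that scanning loop over a `Vec<char>` source (the `scan_identifier` helper and the `HashSet` of keywords are illustrative stand-ins, not the commit's API); the comment on `number()` also suggests the numeric lexeme would later be converted, e.g. with `str::parse::<f64>()` if numbers are stored as `f64`:

    use std::collections::HashSet;

    fn is_alpha_numeric(c: char) -> bool {
        c.is_ascii_alphanumeric() || c == '_'
    }

    // Illustrative stand-in: scan an identifier starting at `start`,
    // returning the lexeme and whether it is a reserved word.
    fn scan_identifier(source: &[char], start: usize, keywords: &HashSet<&str>) -> (String, bool) {
        let mut current = start;
        // Maximal munch: consume while the character is alphanumeric or '_'.
        while current < source.len() && is_alpha_numeric(source[current]) {
            current += 1;
        }
        let text: String = source[start..current].iter().collect();
        let is_keyword = keywords.contains(text.as_str());
        (text, is_keyword)
    }

    fn main() {
        let keywords: HashSet<&str> = ["and", "class", "while", "var"].into_iter().collect();
        let source: Vec<char> = "while foo".chars().collect();

        assert_eq!(scan_identifier(&source, 0, &keywords), (String::from("while"), true));
        assert_eq!(scan_identifier(&source, 6, &keywords), (String::from("foo"), false));

        // As per the number() comment: a numeric lexeme would be parsed like this.
        let value: f64 = "12.5".parse().expect("valid number literal");
        assert_eq!(value, 12.5);
    }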

Changed file 2 of 2:

@@ -1,5 +1,5 @@
-#[derive(Debug)]
+#[derive(Debug, Copy, Clone)]
 pub enum TokenType {
     // Single-character tokens.
     LeftParen,
@@ -103,3 +103,4 @@ impl std::fmt::Display for crate::rlox::token_type::TokenType {
 }
 }
 }
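The second file only widens the derive on `TokenType` from `Debug` to `Debug, Copy, Clone`. That is what makes `*t` in `identifier()` compile: `HashMap::get` returns a reference, and dereferencing it copies the value out of the map, which needs `Copy` (with only `Clone`, it would have to be `t.clone()`). A minimal sketch of the same situation with a stand-in enum:

    use std::collections::HashMap;

    // Stand-in enum: without Copy (and Clone), `*keywords.get(..)` below would not
    // compile, because dereferencing the borrowed value would try to move it out.
    #[derive(Debug, Copy, Clone, PartialEq)]
    enum TokenType {
        And,
        Identifier,
    }

    fn main() {
        let mut keywords: HashMap<String, TokenType> = HashMap::new();
        keywords.insert(String::from("and"), TokenType::And);

        let text = String::from("and");
        // Same shape as identifier(): copy the TokenType out of the map on a hit.
        let token_type = match keywords.get(&text) {
            Some(t) => *t, // needs Copy (or t.clone() with only Clone)
            None => TokenType::Identifier,
        };
        assert_eq!(token_type, TokenType::And);
        println!("{:?}", token_type);
    }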