Reconnaissance des mots réservés et des identificateurs
This commit is contained in:
@@ -1,5 +1,22 @@
|
||||
use super::token_type::TokenType;
|
||||
use super::token::Token;
|
||||
use std::collections::HashMap;
|
||||
|
||||
|
||||
//scores.insert(String::from("Blue"), 10);
|
||||
|
||||
|
||||
/// Returns true when `c` is an ASCII decimal digit (`'0'..='9'`).
///
/// Only ASCII digits count as digits for the scanner; Unicode digit
/// characters are deliberately rejected.
fn is_digit( c: char ) -> bool {
    c.is_ascii_digit()
}
|
||||
|
||||
/// Returns true when `c` may start an identifier: an ASCII letter
/// (`a-z`, `A-Z`) or an underscore.
fn is_alpha( c: char ) -> bool {
    c.is_ascii_alphabetic() || c == '_'
}
|
||||
|
||||
/// Returns true when `c` may continue an identifier: an ASCII letter,
/// an ASCII digit, or an underscore.
///
/// Equivalent to `is_digit(c) || is_alpha(c)`, inlined onto the standard
/// `char` predicates.
fn is_alpha_numeric ( c: char ) -> bool {
    c.is_ascii_alphanumeric() || c == '_'
}
|
||||
|
||||
struct Scanner {
|
||||
source: Vec<char>,
|
||||
@@ -8,13 +25,31 @@ struct Scanner {
|
||||
start: usize,
|
||||
current: usize,
|
||||
line: u32,
|
||||
}
|
||||
|
||||
fn is_digit( c: char ) -> bool {
|
||||
c>='0' && c<='9'
|
||||
keywords: HashMap<String, TokenType>,
|
||||
}
|
||||
|
||||
impl Scanner {
|
||||
fn init_keywords(&mut self) {
|
||||
self.keywords = HashMap::new();
|
||||
self.keywords.insert( String::from("and"), TokenType::And );
|
||||
self.keywords.insert( String::from("class"), TokenType::Class );
|
||||
self.keywords.insert( String::from("else"), TokenType::Else );
|
||||
self.keywords.insert( String::from("false"), TokenType::False );
|
||||
self.keywords.insert( String::from("for"), TokenType::For );
|
||||
self.keywords.insert( String::from("fun"), TokenType::Fun );
|
||||
self.keywords.insert( String::from("if"), TokenType::If );
|
||||
self.keywords.insert( String::from("nil"), TokenType::Nil );
|
||||
self.keywords.insert( String::from("or"), TokenType::Or );
|
||||
self.keywords.insert( String::from("print"), TokenType::Print );
|
||||
self.keywords.insert( String::from("return"), TokenType::Return );
|
||||
self.keywords.insert( String::from("super"), TokenType::Super );
|
||||
self.keywords.insert( String::from("this"), TokenType::This );
|
||||
self.keywords.insert( String::from("true"), TokenType::True );
|
||||
self.keywords.insert( String::from("var"), TokenType::Var );
|
||||
self.keywords.insert( String::from("while"), TokenType::While );
|
||||
}
|
||||
|
||||
fn scan_tokens(&mut self) {
|
||||
while !self.is_at_end() {
|
||||
self.start = self.current;
|
||||
@@ -62,6 +97,8 @@ impl Scanner {
|
||||
_ => {
|
||||
if is_digit(c) {
|
||||
self.number();
|
||||
} else if is_alpha(c) {
|
||||
self.identifier();
|
||||
} else {
|
||||
// Erreur : lexeme inconnu
|
||||
}
|
||||
@@ -139,4 +176,16 @@ impl Scanner {
|
||||
|
||||
self.add_token( TokenType::Number, self.source[self.start..self.current].into_iter().collect() ); // Il faudra faire un parse sur la chaîne pour connaître la valeur effective
|
||||
}
|
||||
|
||||
fn identifier(&mut self) {
|
||||
while is_alpha_numeric(self.peek()) {
|
||||
self.advance();
|
||||
}
|
||||
|
||||
let text: String = self.source[self.start..self.current].into_iter().collect();
|
||||
match self.keywords.get(&text) {
|
||||
Some( t ) => { self.add_simple_token( *t ) },
|
||||
None => { self.add_token( TokenType::Identifier, text ) }
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum TokenType {
|
||||
// Single-character tokens.
|
||||
LeftParen,
|
||||
@@ -103,3 +103,4 @@ impl std::fmt::Display for crate::rlox::token_type::TokenType {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user