Sort out advancing pointers for whitespace
commit e9b0eab38a
parent 8140e3c5e6
2 changed files with 5 additions and 3 deletions
lexer.rs:

@@ -59,7 +59,8 @@ impl Lexer {
     /// Advance the begin pointer to prepare for the next iteration.
     fn advance_begin(&mut self) {
         self.begin = self.input.index_after(self.forward);
-        println!("> begin={}", self.begin);
+        self.forward = self.begin;
+        println!("> begin={}, forward={}", self.begin, self.forward);
     }

     /// Get the substring between the two input indexes. This is the value to give to a new Token instance.
@@ -89,12 +90,13 @@ impl Lexer {
             if c.is_newline() {
                 self.line += 1;
             }
-            self.advance();
+            self.advance_begin();
         }
     }

     /// Handle self.state == State::Identifier
     fn state_identifier(&mut self, c: char, token: &mut Option<Token>) {
+        println!("Identifier! c='{}'", c);
         if c.is_identifier_subsequent() {
             // State in Identifier state.
             self.advance();
main.rs:

@@ -1,7 +1,7 @@
 mod lexer;

 fn main() {
-    let lexer = lexer::Lexer::new(String::from("((abc))"));
+    let lexer = lexer::Lexer::new(String::from("((abc def + ghi))"));
     for t in lexer {
         println!("token = {}", t);
     }
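For context, the point of the advance_begin change is that skipping whitespace has to move both pointers: begin marks where the next lexeme will start, forward marks the character being examined, and before this commit only begin was moved. The sketch below illustrates that two-pointer pattern in isolation. It is a minimal, assumed reconstruction rather than the project's actual code: the Sketch struct, the Vec<char> storage, and the next_word helper are invented for illustration, and only the begin/forward names and the advance_begin behaviour mirror the diff.

```rust
// Minimal sketch of the begin/forward two-pointer idea (illustrative only).
// The real lexer's Input type and index_after() are not shown in the diff,
// so plain usize indexes into a Vec<char> stand in for them here.

struct Sketch {
    chars: Vec<char>,
    begin: usize,   // start of the lexeme currently being collected
    forward: usize, // index of the character currently being examined
}

impl Sketch {
    fn new(s: &str) -> Self {
        Sketch { chars: s.chars().collect(), begin: 0, forward: 0 }
    }

    /// Advance the begin pointer past the character at `forward`, and keep
    /// `forward` in step with it. Moving only `begin` (the pre-commit
    /// behaviour) would leave `forward` pointing at already-skipped input.
    fn advance_begin(&mut self) {
        self.begin = self.forward + 1;
        self.forward = self.begin;
    }

    /// Skip a run of whitespace with advance_begin(), then scan one
    /// whitespace-delimited word with `forward` and slice begin..forward.
    fn next_word(&mut self) -> Option<String> {
        while self.forward < self.chars.len() && self.chars[self.forward].is_whitespace() {
            self.advance_begin();
        }
        if self.forward >= self.chars.len() {
            return None;
        }
        while self.forward < self.chars.len() && !self.chars[self.forward].is_whitespace() {
            self.forward += 1;
        }
        let word: String = self.chars[self.begin..self.forward].iter().collect();
        self.begin = self.forward;
        Some(word)
    }
}

fn main() {
    let mut s = Sketch::new("abc def + ghi");
    while let Some(w) = s.next_word() {
        println!("lexeme = {}", w); // prints abc, def, +, ghi in turn
    }
}
```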