Rename all the Identifier stuff to Id

Eryn Wells 2017-01-16 16:23:02 -08:00
parent 61d98ff135
commit ba911741ab
3 changed files with 25 additions and 25 deletions

View file

@@ -78,7 +78,7 @@ enum State {
     NamedChar(HashSet<&'static str>, String),
     Comment,
     Initial,
-    Identifier,
+    Id,
     Dot,
     Hash,
     Number,
@@ -176,10 +176,10 @@ impl Lexer {
     /// Handle self.state == State::Initial
     fn state_initial(&mut self, c: char) -> StateResult {
         if c.is_left_paren() {
-            return self.token_result(Token::LeftParen(c.to_string()));
+            return self.token_result(Token::LeftParen);
         }
         else if c.is_right_paren() {
-            return self.token_result(Token::RightParen(c.to_string()));
+            return self.token_result(Token::RightParen);
         }
         else if c.is_dot() {
             self.state = State::Dot;
@@ -205,7 +205,7 @@ impl Lexer {
             self.advance();
         }
         else if c.is_identifier_initial() {
-            self.state = State::Identifier;
+            self.state = State::Id;
             self.advance();
         }
@@ -235,16 +235,16 @@ impl Lexer {
         Ok(None)
     }

-    /// Handle self.state == State::Identifier
+    /// Handle self.state == State::Id
     fn state_identifier(&mut self, c: char) -> StateResult {
         if c.is_identifier_subsequent() {
-            // Stay in Identifier state.
+            // Stay in Id state.
             self.advance();
         }
         else if c.is_identifier_delimiter() {
             let value = self.value();
             self.retract();
-            return self.token_result(Token::Identifier(value));
+            return self.token_result(Token::Id(value));
         }
         else {
             return self.generic_error(c);
@@ -469,7 +469,7 @@ impl Lexer {
         else if c.is_identifier_delimiter() {
             let value = self.value();
             self.retract();
-            return self.token_result(Token::Identifier(value));
+            return self.token_result(Token::Id(value));
         }
         else {
             return self.generic_error(c);
@@ -542,7 +542,7 @@ impl Iterator for Lexer {
                 State::Comment => self.state_comment(c),
                 State::Dot => self.state_dot(c),
                 State::Hash => self.state_hash(c),
-                State::Identifier => self.state_identifier(c),
+                State::Id => self.state_identifier(c),
                 State::Initial => self.state_initial(c),
                 State::Number => self.state_number(c),
                 State::NumberDecimal => self.state_number_decimal(c),
@@ -594,8 +594,8 @@ mod tests {

     #[test]
     fn finds_parens() {
-        check_single_token("(", Token::LeftParen(String::from("(")));
-        check_single_token(")", Token::RightParen(String::from(")")));
+        check_single_token("(", Token::LeftParen);
+        check_single_token(")", Token::RightParen);
         check_single_token("#(", Token::LeftVectorParen);
     }
@@ -618,14 +618,14 @@ mod tests {
         check_single_token(".", Token::Dot);
         let mut lexer = Lexer::new("abc . abc");
-        assert_next_token(&mut lexer, &Token::Identifier(String::from("abc")));
+        assert_next_token(&mut lexer, &Token::Id(String::from("abc")));
         assert_next_token(&mut lexer, &Token::Dot);
-        assert_next_token(&mut lexer, &Token::Identifier(String::from("abc")));
+        assert_next_token(&mut lexer, &Token::Id(String::from("abc")));
     }

     #[test]
     fn finds_identifiers() {
-        let tok = |s: &str| { check_single_token(s, Token::Identifier(String::from(s))); };
+        let tok = |s: &str| { check_single_token(s, Token::Id(String::from(s))); };
         tok("abc");
         tok("number?");
         tok("+");
@@ -707,16 +707,16 @@ mod tests {
     #[test]
     fn lexes_simple_expression() {
         check_tokens("(+ 3.4 6.8)", vec![
-            Token::LeftParen(String::from("(")),
-            Token::Identifier(String::from("+")),
+            Token::LeftParen,
+            Token::Id(String::from("+")),
             Token::Number(Number::from_float(3.4)),
             Token::Number(Number::from_float(6.8)),
-            Token::RightParen(String::from(")"))]);
+            Token::RightParen]);
     }

     #[test]
     fn lexes_quoted_identifier() {
-        check_tokens("'abc", vec![Token::Quote, Token::Identifier(String::from("abc"))]);
+        check_tokens("'abc", vec![Token::Quote, Token::Id(String::from("abc"))]);
     }

     fn check_single_token(input: &str, expected: Token) {
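For context, here is a minimal sketch of the identifier path these hunks rename. It is not the repo's Lexer: the Token enum is reduced to the two variants used here, and is_id_subsequent is a hypothetical stand-in for the character-class methods the real lexer defines on char. It shows the same behavior the State::Id handler implements: accumulate characters while they look like an identifier, then emit Token::Id at the first delimiter.

```rust
// Minimal sketch, not the repo's Lexer: stand-in Token plus a hand-rolled walk
// showing how the Id state accumulates characters and emits Token::Id at a delimiter.
#[derive(Debug, PartialEq)]
enum Token {
    Id(String),
    RightParen,
}

// Assumed character class; the real lexer defines these as methods on `char`.
fn is_id_subsequent(c: char) -> bool {
    c.is_alphanumeric() || "+-*/<>=?!".contains(c)
}

fn main() {
    let mut value = String::new();
    let mut tokens = Vec::new();
    for c in "abc)".chars() {
        if is_id_subsequent(c) {
            value.push(c); // stay in the Id state
        } else {
            // Delimiter: emit the accumulated identifier, then the paren itself.
            tokens.push(Token::Id(std::mem::take(&mut value)));
            if c == ')' {
                tokens.push(Token::RightParen);
            }
        }
    }
    assert_eq!(tokens, vec![Token::Id("abc".into()), Token::RightParen]);
}
```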

View file

@@ -10,12 +10,12 @@ pub enum Token {
     Character(Character),
     Comment(String),
     Dot,
-    Identifier(String),
-    LeftParen(String),
+    Id(String),
+    LeftParen,
     LeftVectorParen,
     Number(Number),
     Quote,
-    RightParen(String),
+    RightParen,
     String(String),
 }
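Beyond the Identifier-to-Id rename, this hunk also drops the lexeme payload from LeftParen and RightParen. A minimal sketch of what that means for callers follows; the Token enum is a reduced copy of the variants above, and lexeme is a hypothetical caller, not code from the repo.

```rust
// Reduced copy of the post-rename variants (the real enum has more);
// `lexeme` is a hypothetical caller, not code from the repo.
#[derive(Debug, PartialEq)]
enum Token {
    Id(String),
    LeftParen,
    RightParen,
}

// The paren variants no longer carry a String, so the caller supplies the text.
fn lexeme(token: &Token) -> String {
    match token {
        Token::Id(name) => name.clone(),
        Token::LeftParen => "(".to_string(),
        Token::RightParen => ")".to_string(),
    }
}

fn main() {
    assert_eq!(lexeme(&Token::Id("+".to_string())), "+");
    assert_eq!(lexeme(&Token::LeftParen), "(");
    assert_eq!(lexeme(&Token::RightParen), ")");
}
```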

View file

@@ -100,15 +100,15 @@ mod tests {
     #[test]
     fn parses_single_expression() {
         let r = parse("(a)");
-        let list = list("(", vec![Box::new(Expression::Id("a".to_string()))], ")");
+        let list = list(vec![Box::new(Expression::Id("a".to_string()))]);
         assert_eq!(r.unwrap(), Program::new(vec![list, Expression::EOF]));
     }

-    fn list(left: &str, expr: Vec<Box<Expression>>, right: &str) -> Expression {
+    fn list(expr: Vec<Box<Expression>>) -> Expression {
         Expression::List {
-            left: Token::LeftParen(left.to_string()),
+            left: Token::LeftParen,
             expr: expr,
-            right: Token::RightParen(right.to_string())
+            right: Token::RightParen,
         }
     }
 }
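The list test helper loses its left and right string parameters because the paren tokens are now unit variants: there is only one possible token on each side of a list. A minimal self-contained sketch of the new shape, using reduced stand-ins for the repo's Expression and Token types:

```rust
// Stand-in types; the real Expression and Token live in the repo.
#[derive(Debug, PartialEq)]
enum Token {
    LeftParen,
    RightParen,
}

#[derive(Debug, PartialEq)]
enum Expression {
    Id(String),
    List { left: Token, expr: Vec<Box<Expression>>, right: Token },
}

// With unit paren variants, the helper no longer takes the paren strings.
fn list(expr: Vec<Box<Expression>>) -> Expression {
    Expression::List { left: Token::LeftParen, expr, right: Token::RightParen }
}

fn main() {
    let l = list(vec![Box::new(Expression::Id("a".to_string()))]);
    assert_eq!(
        l,
        Expression::List {
            left: Token::LeftParen,
            expr: vec![Box::new(Expression::Id("a".to_string()))],
            right: Token::RightParen,
        }
    );
}
```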