Update test names: finds for single tokens, lexes for expressions
parent b00615a7b6
commit b731b0323a
1 changed file with 11 additions and 9 deletions
@@ -385,14 +385,14 @@ mod tests {
     use super::token::*;
 
     #[test]
-    fn lexer_finds_parens() {
+    fn finds_parens() {
         check_single_token("(", Token::LeftParen(String::from("(")));
         check_single_token(")", Token::RightParen(String::from(")")));
         check_single_token("#(", Token::LeftVectorParen);
     }
 
     #[test]
-    fn lexer_finds_dots() {
+    fn finds_dots() {
         check_single_token(".", Token::Dot);
 
         let mut lexer = Lexer::new("abc . abc");
@@ -402,26 +402,28 @@ mod tests {
     }
 
     #[test]
-    fn lexer_finds_identifiers() {
-        check_single_token("abc", Token::Identifier(String::from("abc")));
-        check_single_token("+", Token::Identifier(String::from("+")));
-        check_single_token("-", Token::Identifier(String::from("-")));
+    fn finds_identifiers() {
+        let tok = |s: &str| { check_single_token(s, Token::Identifier(String::from(s))); };
+        tok("abc");
+        tok("number?");
+        tok("+");
+        tok("-");
     }
 
     #[test]
-    fn lexer_finds_booleans() {
+    fn finds_booleans() {
         check_single_token("#t", Token::Boolean(true));
         check_single_token("#f", Token::Boolean(false));
     }
 
     #[test]
-    fn lexer_finds_comments() {
+    fn finds_comments() {
         let s = "; a comment";
         check_single_token(s, Token::Comment(String::from(s)));
     }
 
     #[test]
-    fn lexer_finds_strings() {
+    fn finds_strings() {
         check_single_token("\"\"", Token::String(String::from("\"\"")));
         check_single_token("\"abc\"", Token::String(String::from("\"abc\"")));
     }
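For context: the tests above call a check_single_token helper that is defined outside this hunk, and the naming convention in the commit title reserves lexes_* for tests that run the lexer over whole expressions rather than single tokens. Below is a minimal sketch of both, assumed to sit inside the same mod tests block (with use super::token::* in scope). It guesses at a next_token() method on Lexer returning Option<Token> and at Token deriving PartialEq and Debug; neither is confirmed by this diff.

// Hypothetical helper assumed by the tests above; the real one lives
// outside this hunk and its exact signature is a guess.
fn check_single_token(input: &str, expected: Token) {
    let mut lexer = Lexer::new(input);
    // Assumes a next_token() method returning Option<Token>, and that
    // Token derives PartialEq + Debug so assert_eq! can compare and print it.
    assert_eq!(lexer.next_token(), Some(expected));
    assert_eq!(lexer.next_token(), None); // the input should yield exactly one token
}

// Illustrative example of the other half of the convention: a lexes_*
// test exercising a whole expression instead of a single token.
#[test]
fn lexes_simple_expression() {
    let mut lexer = Lexer::new("(+ abc)");
    assert_eq!(lexer.next_token(), Some(Token::LeftParen(String::from("("))));
    assert_eq!(lexer.next_token(), Some(Token::Identifier(String::from("+"))));
    assert_eq!(lexer.next_token(), Some(Token::Identifier(String::from("abc"))));
    assert_eq!(lexer.next_token(), Some(Token::RightParen(String::from(")"))));
    assert_eq!(lexer.next_token(), None);
}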