main.rs: add extra tests, add Exp, add number parsing

Charlie Root 2025-03-27 12:31:03 +01:00
commit be663876a4
Signed by: faukah
SSH key fingerprint: SHA256:Uj2AXqvtdCA4hn5Hq0ZonhIAyUqI1q4w2sMG3Z1TH7E

main.rs

@@ -2,7 +2,7 @@ use std::io;
 #[derive(Debug, PartialEq, Eq)]
 enum Symbol {
-    Number,
+    Number(i32),
     LeftBracket,
     RightBracket,
@ -10,6 +10,7 @@ enum Symbol {
Sub,
Mul,
Div,
Exp,
}
#[derive(Debug)]
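
With Number now carrying its parsed value and Exp joining the operator set, a later evaluation step can dispatch directly on these variants. A minimal sketch of that dispatch, assuming a hypothetical apply helper that is not part of this commit:

fn apply(op: &Symbol, lhs: i32, rhs: i32) -> i32 {
    match op {
        Symbol::Add => lhs + rhs,
        Symbol::Sub => lhs - rhs,
        Symbol::Mul => lhs * rhs,
        Symbol::Div => lhs / rhs,
        // i32::pow takes a u32 exponent, so the right-hand operand is cast
        Symbol::Exp => lhs.pow(rhs as u32),
        _ => panic!("not a binary operator"),
    }
}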
@@ -46,16 +47,33 @@ fn get_user_input() -> io::Result<String> {
 #[test]
 fn try_tokenizing() {
-    let tokenized_input = tokenize("1 + 1").unwrap();
-    let result = vec![Symbol::Number, Symbol::Add, Symbol::Number];
-    assert!(tokenized_input == result, "1 + 1 not working");
+    let input = ["1 + 2", "3 - 4", "5 * 6", "7 / 8", "8 ^ 9"];
+    let tokenized_input: Vec<Vec<Symbol>> = input.iter().map(|s| tokenize(s).unwrap()).collect();
+    let result: Vec<Vec<Symbol>> = vec![
+        vec![Symbol::Number(1), Symbol::Add, Symbol::Number(2)],
+        vec![Symbol::Number(3), Symbol::Sub, Symbol::Number(4)],
+        vec![Symbol::Number(5), Symbol::Mul, Symbol::Number(6)],
+        vec![Symbol::Number(7), Symbol::Div, Symbol::Number(8)],
+        vec![Symbol::Number(8), Symbol::Exp, Symbol::Number(9)],
+    ];
+    assert!(tokenized_input == result);
 }
 
 fn tokenize(input: &str) -> Result<Vec<Symbol>, ParseError> {
     let mut tokens: Vec<Symbol> = vec![];
+    let mut acc = String::new();
     for (i, c) in input.chars().enumerate() {
+        if !c.is_ascii_digit() && !acc.is_empty() {
+            tokens.push(Symbol::Number(acc.parse().unwrap_or(0)));
+            acc.clear();
+        };
         match c {
+            '0'..='9' => {
+                acc.push(c);
+                continue;
+            }
             ' ' => continue,
             '(' => tokens.push(Symbol::LeftBracket),
             ')' => tokens.push(Symbol::RightBracket),
@@ -63,10 +81,14 @@ fn tokenize(input: &str) -> Result<Vec<Symbol>, ParseError> {
             '-' => tokens.push(Symbol::Sub),
             '*' => tokens.push(Symbol::Mul),
             '/' => tokens.push(Symbol::Div),
-            '0'..='9' => tokens.push(Symbol::Number),
+            '^' => tokens.push(Symbol::Exp),
             '\n' => break,
             _ => return Err(ParseError::WrongTokenError { pos: i }),
         }
     }
+
+    if !acc.is_empty() {
+        tokens.push(Symbol::Number(acc.parse().unwrap_or(0)));
+    }
     Ok(tokens)
 }
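
Because digits accumulate in acc and are only flushed on the first non-digit character, or after the loop for a trailing number, multi-digit input now tokenizes correctly even though the updated test only exercises single digits. A quick check against the code above (this extra test is illustrative, not part of the commit):

#[test]
fn try_multi_digit() {
    // '1' and '2' accumulate in `acc`; the space flushes Number(12),
    // and the trailing "345" is flushed after the loop ends.
    let tokens = tokenize("12 + 345").unwrap();
    assert_eq!(
        tokens,
        vec![Symbol::Number(12), Symbol::Add, Symbol::Number(345)]
    );
}

One caveat: acc.parse().unwrap_or(0) maps any literal that overflows i32 to Number(0) instead of surfacing an error.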