Tokenise as needed instead of collecting tokens #5

Merged · 5 commits · Jan 19, 2024
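The PR title sums up the change: instead of collecting every token into a `Vec` up front and handing that to the parser, the tokenizer is now driven lazily as a `Peekable` iterator, and `Parser::new` receives the stream together with its first token. Below is a minimal sketch of the technique; the `Token` and `Tokenizer` types here are simplified stand-ins, not qcalc's own.

```rust
use std::iter::Peekable;
use std::str::Chars;

// Simplified stand-ins for illustration; qcalc's real Token/Tokenizer differ.
#[derive(Debug)]
enum Token {
    Num(f64),
    Plus,
}

struct Tokenizer<'a> {
    chars: Peekable<Chars<'a>>,
}

impl<'a> Tokenizer<'a> {
    fn new(chars: Peekable<Chars<'a>>) -> Self {
        Self { chars }
    }
}

// "Tokenise as needed": each call to next() produces one token on demand,
// so no Vec<Token> is allocated up front.
impl Iterator for Tokenizer<'_> {
    type Item = Token;

    fn next(&mut self) -> Option<Token> {
        while self.chars.peek().is_some_and(|c| c.is_whitespace()) {
            self.chars.next();
        }
        match *self.chars.peek()? {
            '+' => {
                self.chars.next();
                Some(Token::Plus)
            }
            c if c.is_ascii_digit() => {
                let mut lit = String::new();
                while self.chars.peek().is_some_and(|c| c.is_ascii_digit()) {
                    lit.push(self.chars.next().unwrap());
                }
                Some(Token::Num(lit.parse().unwrap()))
            }
            _ => None,
        }
    }
}

fn main() {
    // Mirrors the new call shape: a peekable token stream plus its first token.
    let mut tokens = Tokenizer::new("1 + 2".chars().peekable()).peekable();
    let first = tokens.next().expect("Expected expression");
    println!("first token: {first:?}, tokens remaining: {}", tokens.count());
}
```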
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "qcalc"
version = "0.4.0"
version = "0.4.1"
edition = "2021"
authors = ["ochir <ochir_erkhembayar@yahoo.com>"]
description = """
55 changes: 25 additions & 30 deletions src/app.rs
@@ -63,23 +63,25 @@ impl<'ta> App<'ta> {
file.read_to_string(&mut buf)
.expect("Failed to read from RC file");
buf.lines().for_each(|line| {
let tokens = Tokenizer::new(line.chars().collect::<Vec<_>>().as_slice()).into_tokens();
let res = Parser::new(tokens)
.parse()
.expect("Invalid syntax in RC file");
match res {
Stmt::Fn(name, params, body) => {
self.interpreter.declare_function(name, params, body)
}
Stmt::Assign(name, expr) => {
self.interpreter.define(
name,
Value::Num(self.interpreter.interpret_expr(&expr).unwrap_or_else(|_| {
panic!("RC file: {} not found", &self.rc_file.display())
})),
);
let mut tokenizer = Tokenizer::new(line.chars().peekable()).peekable();
if let Some(token) = tokenizer.next() {
let res = Parser::new(tokenizer, token)
.parse()
.expect("Invalid syntax in RC file");
match res {
Stmt::Fn(name, params, body) => {
self.interpreter.declare_function(name, params, body)
}
Stmt::Assign(name, expr) => {
self.interpreter.define(
name,
Value::Num(self.interpreter.interpret_expr(&expr).unwrap_or_else(
|_| panic!("RC file: {} not found", &self.rc_file.display()),
)),
);
}
_ => {}
}
_ => {}
}
});
}
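One behavioural detail of the RC-file hunk above: a line is only parsed if the tokenizer yields at least one token, so blank lines in the RC file are skipped instead of reaching the parser. A rough sketch of that guard, with `split_whitespace` standing in for qcalc's `Tokenizer` and a hypothetical `handle_line` helper in place of the `Parser`/`Interpreter` work:

```rust
// Illustrative only: split_whitespace stands in for Tokenizer, and handle_line is a
// hypothetical stand-in for the per-line parse and interpret step.
fn load_rc(buf: &str) {
    buf.lines().for_each(|line| {
        let mut tokens = line.split_whitespace().peekable();
        // Blank or whitespace-only lines produce no first token and are ignored.
        if let Some(first) = tokens.next() {
            handle_line(first, tokens);
        }
    });
}

fn handle_line<'a>(first: &'a str, rest: impl Iterator<Item = &'a str>) {
    println!("{first} (+{} more tokens)", rest.count());
}
```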
@@ -129,12 +131,12 @@ impl<'ta> App<'ta> {

pub fn eval(&mut self) {
let input = &self.input.lines()[0];
// TODO: Move the tokenizer into the parser so that we're not doing
// this unnecessary allocation. Figure out how to handle end of expressions
// without the use of semicolons (or implicitly add it in but then if someone
// enters one it would terminate their expression which is weird)
let tokens = Tokenizer::new(input.chars().collect::<Vec<_>>().as_slice()).into_tokens();
match Parser::new(tokens).parse() {
let mut tokenizer = Tokenizer::new(input.chars().peekable()).peekable();
if tokenizer.peek().is_none() {
return;
}
let current = tokenizer.next().unwrap();
match Parser::new(tokenizer, current).parse() {
Ok(stmt) => {
if let Stmt::Expr(expr) = &stmt {
if !self.expr_history.contains(expr) {
@@ -246,7 +248,7 @@ mod tests {
if let Some(output) = &app.output {
assert_eq!(output.parse::<f64>().unwrap(), expected);
} else {
panic!("Not equal");
panic!("Error: {:?}", app.err);
}
}

@@ -258,13 +260,6 @@
assert!(app.output.is_some_and(|r| r == "3"));
}

#[test]
fn test_empty_input() {
let mut app = new_app();
input_and_evaluate(&mut app, "");
assert!(app.err.is_some_and(|o| o == "Expected expression, got: "));
}

#[test]
fn test_built_in_fns() {
let mut app = new_app();
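The deleted `test_empty_input` lines up with the new early return in `App::eval` above: when the token stream is empty, `eval` returns before a `Parser` is ever constructed, so empty input presumably no longer records the "Expected expression, got: " error the old test asserted. A simplified sketch of that control flow, with a plain `char` iterator standing in for `Tokenizer` and the parse step stubbed out:

```rust
// Sketch of the empty-input path in App::eval after this change (stand-ins only).
fn eval(input: &str) {
    let mut tokenizer = input.chars().peekable();
    if tokenizer.peek().is_none() {
        return; // empty input: no parse attempt, no error recorded
    }
    let current = tokenizer.next().unwrap(); // safe: peek() was Some
    println!("would parse starting from {current:?}");
}

fn main() {
    eval("");      // silently ignored
    eval("1 + 1"); // proceeds to the (stubbed) parse step
}
```

The peek-then-unwrap pair is equivalent to a single `if let Some(current) = tokenizer.next()`, which would drop the `unwrap`.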
5 changes: 3 additions & 2 deletions src/lib.rs
@@ -45,8 +45,9 @@ pub fn tui() -> Result<(), Box<dyn Error>> {
}

pub fn eval(input: &str) -> Result<f64, Box<dyn Error>> {
let tokens = Tokenizer::new(input.chars().collect::<Vec<_>>().as_slice()).into_tokens();
let stmt = Parser::new(tokens).parse()?;
let mut tokenizer = Tokenizer::new(input.chars().peekable()).peekable();
let current = tokenizer.next().ok_or("Expected expression")?;
let stmt = Parser::new(tokenizer, current).parse()?;
let res = Interpreter::new().interpret(stmt)?;
if let Some(ans) = res {
Ok(ans)
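At the library level, by contrast, empty input still surfaces as an error through `ok_or("Expected expression")`. Below is a hypothetical use of the public `eval` entry point; only the signature `eval(&str) -> Result<f64, Box<dyn Error>>` comes from the diff, and the infix syntax of the inputs is assumed:

```rust
// Hypothetical usage; the inputs are invented, only eval's signature comes from the diff.
use std::error::Error;

fn main() -> Result<(), Box<dyn Error>> {
    // Assumes ordinary infix arithmetic; qcalc's exact grammar is not shown here.
    let answer = qcalc::eval("1 + 2")?;
    assert_eq!(answer, 3.0);

    // Empty input fails at the first tokenizer.next(), per ok_or("Expected expression").
    assert!(qcalc::eval("").is_err());
    Ok(())
}
```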