diff --git a/Cargo.lock b/Cargo.lock index acc1be69e29..e257f940986 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,6 +8,7 @@ dependencies = [ "gc", "jemallocator", "num-traits", + "once_cell", "rand", "regex", "rustc-hash", @@ -253,7 +254,7 @@ checksum = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674" [[package]] name = "gc" version = "0.3.4" -source = "git+https://github.com/Razican/rust-gc.git?branch=box_str#fadf8eb29b55c27ef973ecc3395bd3c18de849bb" +source = "git+https://github.com/Manishearth/rust-gc.git#332ac522e13bc4d8ea2e46639bfe262e8d3a66a5" dependencies = [ "gc_derive", ] @@ -261,7 +262,7 @@ dependencies = [ [[package]] name = "gc_derive" version = "0.3.4" -source = "git+https://github.com/Razican/rust-gc.git?branch=box_str#fadf8eb29b55c27ef973ecc3395bd3c18de849bb" +source = "git+https://github.com/Manishearth/rust-gc.git#332ac522e13bc4d8ea2e46639bfe262e8d3a66a5" dependencies = [ "proc-macro2", "quote", @@ -351,9 +352,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.69" +version = "0.2.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99e85c08494b21a9054e7fe1374a732aeadaff3980b6990b94bfd3a70f690005" +checksum = "3baa92041a6fec78c687fa0cc2b3fae8884f743d672cf551bed1d6dac6988d0f" [[package]] name = "log" @@ -404,6 +405,12 @@ dependencies = [ "libc", ] +[[package]] +name = "once_cell" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c601810575c99596d4afc46f78a678c80105117c379eb3650cf99b8a21ce5b" + [[package]] name = "oorandom" version = "11.1.1" @@ -465,9 +472,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c1f4b0efa5fc5e8ceb705136bfee52cfdb6a4e3509f770b478cd6ed434232a7" +checksum = "42934bc9c8ab0d3b273a16d8551c8f0fcff46be73276ca083ec2414c15c4ba5e" dependencies = [ 
"proc-macro2", ] @@ -678,9 +685,9 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.20" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd1b5e337360b1fae433c59fcafa0c6b77c605e92540afa5221a7b81a9eca91d" +checksum = "4696caa4048ac7ce2bcd2e484b3cef88c1004e41b8e945a277e2c25dc0b72060" dependencies = [ "proc-macro2", "quote", diff --git a/boa/Cargo.toml b/boa/Cargo.toml index 1b4608e7112..0624828de1c 100644 --- a/boa/Cargo.toml +++ b/boa/Cargo.toml @@ -11,7 +11,7 @@ exclude = ["../.vscode/*", "../Dockerfile", "../Makefile", "../.editorConfig"] edition = "2018" [dependencies] -gc = { version = "0.3.4", features = ["derive"], git = "https://github.com/Razican/rust-gc.git", branch = "box_str" } +gc = { version = "0.3.4", features = ["derive"], git = "https://github.com/Manishearth/rust-gc.git" } serde_json = "1.0.53" rand = "0.7.3" num-traits = "0.2.11" @@ -20,6 +20,7 @@ rustc-hash = "1.1.0" # Optional Dependencies serde = { version = "1.0.110", features = ["derive"], optional = true } +once_cell = "1.3.1" [dev-dependencies] criterion = "0.3.2" diff --git a/boa/src/builtins/function/mod.rs b/boa/src/builtins/function/mod.rs index b1dae529dfd..065d9616e2a 100644 --- a/boa/src/builtins/function/mod.rs +++ b/boa/src/builtins/function/mod.rs @@ -19,8 +19,8 @@ use crate::{ value::{ResultValue, Value}, }, environment::lexical_environment::{new_function_environment, Environment}, - exec::Interpreter, - syntax::ast::node::{FormalParameter, Node}, + exec::{Executable, Interpreter}, + syntax::ast::node::{FormalParameter, StatementList}, }; use gc::{unsafe_empty_trace, Finalize, Trace}; use std::fmt::{self, Debug}; @@ -48,14 +48,14 @@ pub enum ThisMode { #[derive(Clone, Finalize)] pub enum FunctionBody { BuiltIn(NativeFunctionData), - Ordinary(Node), + Ordinary(StatementList), } impl Debug for FunctionBody { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::BuiltIn(_) => write!(f, "native code"), 
- Self::Ordinary(node) => write!(f, "{}", node), + Self::Ordinary(statements) => write!(f, "{:?}", statements), } } } @@ -191,7 +191,7 @@ impl Function { // Call body should be set before reaching here let result = match &self.body { - FunctionBody::Ordinary(ref body) => interpreter.exec(body), + FunctionBody::Ordinary(ref body) => body.run(interpreter), _ => panic!("Ordinary function should not have BuiltIn Function body"), }; @@ -250,7 +250,7 @@ impl Function { // Call body should be set before reaching here let result = match &self.body { - FunctionBody::Ordinary(ref body) => interpreter.exec(body), + FunctionBody::Ordinary(ref body) => body.run(interpreter), _ => panic!("Ordinary function should not have BuiltIn Function body"), }; diff --git a/boa/src/exec/array.rs b/boa/src/exec/array.rs index d29038bd58b..2aa92a955dd 100644 --- a/boa/src/exec/array.rs +++ b/boa/src/exec/array.rs @@ -15,12 +15,12 @@ impl Executable for ArrayDecl { let mut elements = Vec::new(); for elem in self.as_ref() { if let Node::Spread(ref x) = elem { - let val = interpreter.exec(x)?; + let val = x.run(interpreter)?; let mut vals = interpreter.extract_array_properties(&val).unwrap(); elements.append(&mut vals); continue; // Don't push array after spread } - elements.push(interpreter.exec(&elem)?); + elements.push(elem.run(interpreter)?); } add_to_array_object(&array, &elements)?; diff --git a/boa/src/exec/arrow_function.rs b/boa/src/exec/arrow_function.rs index 4fb9520f1d4..fccf2dec7e5 100644 --- a/boa/src/exec/arrow_function.rs +++ b/boa/src/exec/arrow_function.rs @@ -22,20 +22,20 @@ impl Executable for ArrowFunctionDecl { // .get_field_slice("Prototype"); let func = FunctionObject::create_ordinary( - self.params.clone(), // TODO: args shouldn't need to be a reference it should be passed by value + self.params().to_vec(), // TODO: args shouldn't need to be a reference it should be passed by value interpreter .realm_mut() .environment .get_current_environment() .clone(), - 
FunctionBody::Ordinary(*self.body.clone()), + FunctionBody::Ordinary(self.body().to_vec().into()), ThisMode::Lexical, ); let mut new_func = Object::function(); new_func.set_call(func); let val = Value::from(new_func); - val.set_field_slice("length", Value::from(self.params.len())); + val.set_field_slice("length", Value::from(self.params().len())); Ok(val) } diff --git a/boa/src/exec/block.rs b/boa/src/exec/block.rs index 521e922024e..c592a05ab92 100644 --- a/boa/src/exec/block.rs +++ b/boa/src/exec/block.rs @@ -1,3 +1,5 @@ +//! Block statement execution. + use super::{Executable, Interpreter}; use crate::{ builtins::value::{ResultValue, Value}, @@ -15,8 +17,8 @@ impl Executable for Block { } let mut obj = Value::null(); - for statement in self.as_ref() { - obj = interpreter.exec(statement)?; + for statement in self.statements() { + obj = statement.run(interpreter)?; // early return if interpreter.is_return { diff --git a/boa/src/exec/declaration.rs b/boa/src/exec/declaration.rs new file mode 100644 index 00000000000..9e9154228d8 --- /dev/null +++ b/boa/src/exec/declaration.rs @@ -0,0 +1,114 @@ +//! Declaration execution. 
+ +use super::{Executable, Interpreter}; +use crate::{ + builtins::{ + function::{Function as FunctionObject, FunctionBody, ThisMode}, + object::Object, + value::{ResultValue, Value}, + }, + environment::lexical_environment::VariableScope, + syntax::ast::node::{FunctionDecl, FunctionExpr, VarDeclList}, +}; + +impl Executable for FunctionDecl { + fn run(&self, interpreter: &mut Interpreter) -> ResultValue { + // Todo: Function.prototype doesn't exist yet, so the prototype right now is the Object.prototype + // let proto = &self + // .realm + // .environment + // .get_global_object() + // .expect("Could not get the global object") + // .get_field_slice("Object") + // .get_field_slice("Prototype"); + + let func = FunctionObject::create_ordinary( + self.parameters().to_vec(), + interpreter + .realm_mut() + .environment + .get_current_environment() + .clone(), + FunctionBody::Ordinary(self.body().to_vec().into()), + ThisMode::NonLexical, + ); + + let mut new_func = Object::function(); + new_func.set_call(func); + let val = Value::from(new_func); + val.set_field_slice("length", Value::from(self.parameters().len())); + + // Set the name and assign it in the current environment + if let Some(name) = self.name() { + val.set_field_slice("name", Value::from(self.name())); + interpreter.realm_mut().environment.create_mutable_binding( + name.to_owned(), + false, + VariableScope::Function, + ); + + interpreter + .realm_mut() + .environment + .initialize_binding(name, val.clone()); + } + + Ok(val) + } +} + +impl Executable for FunctionExpr { + fn run(&self, interpreter: &mut Interpreter) -> ResultValue { + // Todo: Function.prototype doesn't exist yet, so the prototype right now is the Object.prototype + // let proto = &self + // .realm + // .environment + // .get_global_object() + // .expect("Could not get the global object") + // .get_field_slice("Object") + // .get_field_slice("Prototype"); + + let func = FunctionObject::create_ordinary( + self.parameters().to_vec(), + 
interpreter + .realm_mut() + .environment + .get_current_environment() + .clone(), + FunctionBody::Ordinary(self.body().to_vec().into()), + ThisMode::NonLexical, + ); + + let mut new_func = Object::function(); + new_func.set_call(func); + let val = Value::from(new_func); + val.set_field_slice("length", Value::from(self.parameters().len())); + + if let Some(name) = self.name() { + val.set_field_slice("name", Value::string(name)); + } + + Ok(val) + } +} + +impl Executable for VarDeclList { + fn run(&self, interpreter: &mut Interpreter) -> ResultValue { + for var in self.as_ref() { + let val = match var.init() { + Some(v) => v.run(interpreter)?, + None => Value::undefined(), + }; + interpreter.realm_mut().environment.create_mutable_binding( + var.name().to_owned(), + false, + VariableScope::Function, + ); + interpreter + .realm_mut() + .environment + .initialize_binding(var.name(), val); + } + Ok(Value::undefined()) + } +} diff --git a/boa/src/exec/mod.rs b/boa/src/exec/mod.rs index 3a722bd8327..0eabd5c2605 100644 --- a/boa/src/exec/mod.rs +++ b/boa/src/exec/mod.rs @@ -3,21 +3,22 @@ mod array; mod arrow_function; mod block; +mod declaration; mod operator; +mod statement_list; #[cfg(test)] mod tests; use crate::{ builtins::{ - function::{Function as FunctionObject, FunctionBody, ThisMode}, object::{ - internal_methods_trait::ObjectInternalMethods, Object, ObjectKind, INSTANCE_PROTOTYPE, + internal_methods_trait::ObjectInternalMethods, ObjectKind, INSTANCE_PROTOTYPE, PROTOTYPE, }, property::Property, value::{ResultValue, Value, ValueData}, }, - environment::lexical_environment::{new_declarative_environment, VariableScope}, + environment::lexical_environment::VariableScope, realm::Realm, syntax::ast::{ constant::Const, @@ -479,7 +480,7 @@ impl Executable for Node { PropertyDefinition::MethodDefinition(kind, name, func) => { if let MethodDefinitionKind::Ordinary = kind { obj.borrow() - .set_field_slice(&name.clone(), interpreter.exec(func)?); + 
.set_field_slice(&name.clone(), func.run(interpreter)?); } else { // TODO: Implement other types of MethodDefinitionKinds. unimplemented!("other types of property method definitions."); @@ -493,80 +494,9 @@ impl Executable for Node { } Node::ArrayDecl(ref arr) => arr.run(interpreter), // - Node::FunctionDecl(ref name, ref args, ref expr) => { - // Todo: Function.prototype doesn't exist yet, so the prototype right now is the Object.prototype - // let proto = &self - // .realm - // .environment - // .get_global_object() - // .expect("Could not get the global object") - // .get_field_slice("Object") - // .get_field_slice("Prototype"); - - let func = FunctionObject::create_ordinary( - args.clone(), // TODO: args shouldn't need to be a reference it should be passed by value - interpreter - .realm_mut() - .environment - .get_current_environment() - .clone(), - FunctionBody::Ordinary(*expr.clone()), - ThisMode::NonLexical, - ); - - let mut new_func = Object::function(); - new_func.set_call(func); - let val = Value::from(new_func); - val.set_field_slice("length", Value::from(args.len())); - - // Set the name and assign it in the current environment - val.set_field_slice("name", Value::from(name.as_ref())); - interpreter.realm_mut().environment.create_mutable_binding( - name.as_ref().to_owned(), - false, - VariableScope::Function, - ); - - interpreter - .realm_mut() - .environment - .initialize_binding(name, val.clone()); - - Ok(val) - } + Node::FunctionDecl(ref decl) => decl.run(interpreter), // - Node::FunctionExpr(ref name, ref args, ref expr) => { - // Todo: Function.prototype doesn't exist yet, so the prototype right now is the Object.prototype - // let proto = &self - // .realm - // .environment - // .get_global_object() - // .expect("Could not get the global object") - // .get_field_slice("Object") - // .get_field_slice("Prototype"); - - let func = FunctionObject::create_ordinary( - args.clone(), // TODO: args shouldn't need to be a reference it should be passed by 
value - interpreter - .realm_mut() - .environment - .get_current_environment() - .clone(), - FunctionBody::Ordinary(*expr.clone()), - ThisMode::NonLexical, - ); - - let mut new_func = Object::function(); - new_func.set_call(func); - let val = Value::from(new_func); - val.set_field_slice("length", Value::from(args.len())); - - if let Some(name) = name { - val.set_field_slice("name", Value::string(name.as_ref())); - } - - Ok(val) - } + Node::FunctionExpr(ref expr) => expr.run(interpreter), Node::ArrowFunctionDecl(ref decl) => decl.run(interpreter), Node::BinOp(ref op) => op.run(interpreter), Node::UnaryOp(ref op, ref a) => { @@ -614,8 +544,8 @@ impl Executable for Node { Node::ArrayDecl(_) | Node::Block(_) | Node::Const(_) - | Node::FunctionDecl(_, _, _) - | Node::FunctionExpr(_, _, _) + | Node::FunctionDecl(_) + | Node::FunctionExpr(_) | Node::New(_) | Node::Object(_) | Node::TypeOf(_) @@ -663,25 +593,7 @@ impl Executable for Node { } Node::Throw(ref ex) => Err(interpreter.exec(ex)?), Node::Assign(ref op) => op.run(interpreter), - Node::VarDecl(ref vars) => { - for var in vars.iter() { - let (name, value) = var.clone(); - let val = match value { - Some(v) => interpreter.exec(&v)?, - None => Value::undefined(), - }; - interpreter.realm_mut().environment.create_mutable_binding( - name.as_ref().to_owned(), - false, - VariableScope::Function, - ); - interpreter - .realm_mut() - .environment - .initialize_binding(&name, val); - } - Ok(Value::undefined()) - } + Node::VarDeclList(ref decl) => decl.run(interpreter), Node::LetDecl(ref vars) => { for var in vars.iter() { let (name, value) = var.clone(); @@ -737,32 +649,6 @@ impl Executable for Node { } })) } - Node::StatementList(ref list) => { - { - let env = &mut interpreter.realm_mut().environment; - env.push(new_declarative_environment(Some( - env.get_current_environment_ref().clone(), - ))); - } - - let mut obj = Value::null(); - for (i, item) in list.iter().enumerate() { - let val = interpreter.exec(item)?; - // early 
return - if interpreter.is_return { - obj = val; - break; - } - if i + 1 == list.len() { - obj = val; - } - } - - // pop the block env - let _ = interpreter.realm_mut().environment.pop(); - - Ok(obj) - } Node::Spread(ref node) => { // TODO: for now we can do nothing but return the value as-is interpreter.exec(node) diff --git a/boa/src/exec/operator.rs b/boa/src/exec/operator.rs index e4617bb59b5..8bec69e4c15 100644 --- a/boa/src/exec/operator.rs +++ b/boa/src/exec/operator.rs @@ -12,7 +12,7 @@ use crate::{ impl Executable for Assign { fn run(&self, interpreter: &mut Interpreter) -> ResultValue { - let val = interpreter.exec(self.rhs())?; + let val = self.rhs().run(interpreter)?; match self.lhs() { Node::Local(ref name) => { if interpreter.realm().environment.has_binding(name.as_ref()) { @@ -35,12 +35,12 @@ impl Executable for Assign { } } Node::GetConstField(ref obj, ref field) => { - let val_obj = interpreter.exec(obj)?; + let val_obj = obj.run(interpreter)?; val_obj.set_field_slice(&field.clone(), val.clone()); } Node::GetField(ref obj, ref field) => { - let val_obj = interpreter.exec(obj)?; - let val_field = interpreter.exec(field)?; + let val_obj = obj.run(interpreter)?; + let val_field = field.run(interpreter)?; val_obj.set_field(val_field, val.clone()); } _ => (), @@ -53,8 +53,8 @@ impl Executable for BinOp { fn run(&self, interpreter: &mut Interpreter) -> ResultValue { match self.op() { op::BinOp::Num(op) => { - let v_a = interpreter.exec(self.lhs())?; - let v_b = interpreter.exec(self.rhs())?; + let v_a = self.lhs().run(interpreter)?; + let v_b = self.rhs().run(interpreter)?; Ok(match op { NumOp::Add => v_a + v_b, NumOp::Sub => v_a - v_b, @@ -65,8 +65,8 @@ impl Executable for BinOp { }) } op::BinOp::Bit(op) => { - let v_a = interpreter.exec(self.lhs())?; - let v_b = interpreter.exec(self.rhs())?; + let v_a = self.lhs().run(interpreter)?; + let v_b = self.rhs().run(interpreter)?; Ok(match op { BitOp::And => v_a & v_b, BitOp::Or => v_a | v_b, @@ -78,8 +78,8 
@@ impl Executable for BinOp { }) } op::BinOp::Comp(op) => { - let mut v_a = interpreter.exec(self.lhs())?; - let mut v_b = interpreter.exec(self.rhs())?; + let mut v_a = self.lhs().run(interpreter)?; + let mut v_b = self.rhs().run(interpreter)?; Ok(Value::from(match op { CompOp::Equal if v_a.is_object() => v_a == v_b, CompOp::Equal => v_a == v_b, @@ -107,12 +107,12 @@ impl Executable for BinOp { let to_bool = |value| bool::from(&value); Ok(match op { LogOp::And => Value::from( - to_bool(interpreter.exec(self.lhs())?) - && to_bool(interpreter.exec(self.rhs())?), + to_bool(self.lhs().run(interpreter)?) + && to_bool(self.rhs().run(interpreter)?), ), LogOp::Or => Value::from( - to_bool(interpreter.exec(self.lhs())?) - || to_bool(interpreter.exec(self.rhs())?), + to_bool(self.lhs().run(interpreter)?) + || to_bool(self.rhs().run(interpreter)?), ), }) } @@ -122,7 +122,7 @@ impl Executable for BinOp { .realm() .environment .get_binding_value(name.as_ref()); - let v_b = interpreter.exec(self.rhs())?; + let v_b = self.rhs().run(interpreter)?; let value = Self::run_assign(op, v_a, v_b); interpreter.realm.environment.set_mutable_binding( name.as_ref(), @@ -132,9 +132,9 @@ impl Executable for BinOp { Ok(value) } Node::GetConstField(ref obj, ref field) => { - let v_r_a = interpreter.exec(obj)?; + let v_r_a = obj.run(interpreter)?; let v_a = v_r_a.get_field_slice(field); - let v_b = interpreter.exec(self.rhs())?; + let v_b = self.rhs().run(interpreter)?; let value = Self::run_assign(op, v_a, v_b); v_r_a.set_field_slice(&field.clone(), value.clone()); Ok(value) diff --git a/boa/src/exec/statement_list.rs b/boa/src/exec/statement_list.rs new file mode 100644 index 00000000000..e9fbc19e567 --- /dev/null +++ b/boa/src/exec/statement_list.rs @@ -0,0 +1,37 @@ +//! Statement list execution. 
+
+use super::{Executable, Interpreter};
+use crate::{
+    builtins::value::{ResultValue, Value},
+    environment::lexical_environment::new_declarative_environment,
+    syntax::ast::node::StatementList,
+};
+
+impl Executable for StatementList {
+    fn run(&self, interpreter: &mut Interpreter) -> ResultValue {
+        {
+            let env = &mut interpreter.realm_mut().environment;
+            env.push(new_declarative_environment(Some(
+                env.get_current_environment_ref().clone(),
+            )));
+        }
+
+        let mut obj = Value::null();
+        for (i, item) in self.statements().iter().enumerate() {
+            let val = interpreter.exec(item)?;
+            // early return
+            if interpreter.is_return {
+                obj = val;
+                break;
+            }
+            if i + 1 == self.statements().len() {
+                obj = val;
+            }
+        }
+
+        // pop the block env
+        let _ = interpreter.realm_mut().environment.pop();
+
+        Ok(obj)
+    }
+}
diff --git a/boa/src/lib.rs b/boa/src/lib.rs
index 62321d4de06..f84c58d8c7f 100644
--- a/boa/src/lib.rs
+++ b/boa/src/lib.rs
@@ -40,12 +40,12 @@ pub mod realm;
 pub mod syntax;
 
 use crate::{
     builtins::value::ResultValue,
-    exec::Interpreter,
+    exec::{Executable, Interpreter},
     realm::Realm,
-    syntax::{ast::node::Node, lexer::Lexer, parser::Parser},
+    syntax::{ast::node::StatementList, lexer::Lexer, parser::Parser},
 };
-fn parser_expr(src: &str) -> Result<Node, String> {
+fn parser_expr(src: &str) -> Result<StatementList, String> {
     let mut lexer = Lexer::new(src);
     lexer.lex().map_err(|e| format!("SyntaxError: {}", e))?;
     let tokens = lexer.tokens;
@@ -59,16 +59,11 @@ fn parser_expr(src: &str) -> Result<StatementList, String> {
 pub fn forward(engine: &mut Interpreter, src: &str) -> String {
     // Setup executor
     let expr = match parser_expr(src) {
-        Ok(v) => v,
-        Err(error_string) => {
-            return error_string;
-        }
+        Ok(res) => res,
+        Err(e) => return e,
     };
-    let result = engine.exec(&expr);
-    match result {
-        Ok(v) => v.to_string(),
-        Err(v) => format!("{}: {}", "Error", v.to_string()),
-    }
+    expr.run(engine)
+        .map_or_else(|e| format!("Error: {}", e), |v| v.to_string())
 }
 
 /// Execute the code using an existing Interpreter.
@@ -78,7 +73,7 @@ pub fn forward(engine: &mut Interpreter, src: &str) -> String { pub fn forward_val(engine: &mut Interpreter, src: &str) -> ResultValue { // Setup executor match parser_expr(src) { - Ok(expr) => engine.exec(&expr), + Ok(expr) => expr.run(engine), Err(e) => { eprintln!("{}", e); std::process::exit(1); diff --git a/boa/src/syntax/ast/mod.rs b/boa/src/syntax/ast/mod.rs index 7d1d1c7fd17..1657b783720 100644 --- a/boa/src/syntax/ast/mod.rs +++ b/boa/src/syntax/ast/mod.rs @@ -4,6 +4,14 @@ pub mod constant; pub mod keyword; pub mod node; pub mod op; -pub mod pos; -pub mod punc; +pub mod position; +pub mod punctuator; pub mod token; + +pub use self::{ + keyword::Keyword, + node::Node, + position::{Position, Span}, + punctuator::Punctuator, + token::{Token, TokenKind}, +}; diff --git a/boa/src/syntax/ast/node/array.rs b/boa/src/syntax/ast/node/array.rs index 55763df7c01..c1f2479e59e 100644 --- a/boa/src/syntax/ast/node/array.rs +++ b/boa/src/syntax/ast/node/array.rs @@ -24,9 +24,9 @@ use serde::{Deserialize, Serialize}; /// [spec]: https://tc39.es/ecma262/#prod-ArrayLiteral /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[cfg_attr(feature = "serde", serde(transparent))] #[derive(Clone, Debug, Trace, Finalize, PartialEq)] pub struct ArrayDecl { + #[cfg_attr(feature = "serde", serde(flatten))] arr: Box<[Node]>, } diff --git a/boa/src/syntax/ast/node/arrow_function.rs b/boa/src/syntax/ast/node/arrow_function.rs index 926448a8010..8b137891791 100644 --- a/boa/src/syntax/ast/node/arrow_function.rs +++ b/boa/src/syntax/ast/node/arrow_function.rs @@ -1,62 +1 @@ -//! Arrow function declaration node. 
-use super::{join_nodes, FormalParameter, Node}; -use gc::{Finalize, Trace}; -use std::fmt; - -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; - -/// An arrow function expression is a syntactically compact alternative to a regular function -/// expression. -/// -/// Arrow function expressions are ill suited as methods, and they cannot be used as -/// constructors. Arrow functions cannot be used as constructors and will throw an error when -/// used with new. -/// -/// More information: -/// - [ECMAScript reference][spec] -/// - [MDN documentation][mdn] -/// -/// [spec]: https://tc39.es/ecma262/#prod-ArrowFunction -/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Clone, Debug, Trace, Finalize, PartialEq)] -pub struct ArrowFunctionDecl { - pub(crate) params: Box<[FormalParameter]>, - pub(crate) body: Box, -} - -impl ArrowFunctionDecl { - /// Creates a new `ArrowFunctionDecl` AST node. - pub(crate) fn new(params: P, body: B) -> Self - where - P: Into>, - B: Into>, - { - Self { - params: params.into(), - body: body.into(), - } - } - - /// Implements the display formatting with indentation. 
- pub(super) fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result { - write!(f, "(")?; - join_nodes(f, &self.params)?; - f.write_str(") => ")?; - self.body.display(f, indentation) - } -} - -impl fmt::Display for ArrowFunctionDecl { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.display(f, 0) - } -} - -impl From for Node { - fn from(decl: ArrowFunctionDecl) -> Self { - Self::ArrowFunctionDecl(decl) - } -} diff --git a/boa/src/syntax/ast/node/block.rs b/boa/src/syntax/ast/node/block.rs index 2fef1629b4e..a71e4ddf2b8 100644 --- a/boa/src/syntax/ast/node/block.rs +++ b/boa/src/syntax/ast/node/block.rs @@ -23,12 +23,27 @@ use serde::{Deserialize, Serialize}; /// [spec]: https://tc39.es/ecma262/#prod-BlockStatement /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/block #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "serde", serde(transparent))] #[derive(Clone, Debug, Trace, Finalize, PartialEq)] pub struct Block { + #[cfg_attr(feature = "serde", serde(flatten))] statements: StatementList, } impl Block { + /// Gets the list of statements in this block. + pub(crate) fn statements(&self) -> &[Node] { + self.statements.statements() + } + + /// Gets the lexically declared names. + /// + /// More information: + /// + pub(crate) fn lexically_declared_names(&self) -> &[Box] { + self.statements.lexically_declared_names() + } + /// Implements the display formatting with indentation. 
pub(super) fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result { writeln!(f, "{{")?; @@ -37,6 +52,17 @@ impl Block { } } +impl From for Block +where + T: Into, +{ + fn from(list: T) -> Self { + Self { + statements: list.into(), + } + } +} + impl fmt::Display for Block { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.display(f, 0) diff --git a/boa/src/syntax/ast/node/declaration.rs b/boa/src/syntax/ast/node/declaration.rs new file mode 100644 index 00000000000..579ce9b771c --- /dev/null +++ b/boa/src/syntax/ast/node/declaration.rs @@ -0,0 +1,334 @@ +//! Declaration nodes. + +use super::{join_nodes, FormalParameter, Node, StatementList}; +use gc::{Finalize, Trace}; +use std::fmt; + +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; +/// The `var` statement declares a variable, optionally initializing it to a value. +/// +/// var declarations, wherever they occur, are processed before any code is executed. This is +/// called hoisting, and is discussed further below. +/// +/// The scope of a variable declared with var is its current execution context, which is either +/// the enclosing function or, for variables declared outside any function, global. If you +/// re-declare a JavaScript variable, it will not lose its value. +/// +/// Assigning a value to an undeclared variable implicitly creates it as a global variable (it +/// becomes a property of the global object) when the assignment is executed. 
+///
+/// More information:
+/// - [ECMAScript reference][spec]
+/// - [MDN documentation][mdn]
+///
+/// [spec]: https://tc39.es/ecma262/#prod-VariableStatement
+/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/var
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
+pub struct VarDeclList {
+    #[cfg_attr(feature = "serde", serde(flatten))]
+    vars: Box<[VarDecl]>,
+}
+
+impl<T> From<T> for VarDeclList
+where
+    T: Into<Box<[VarDecl]>>,
+{
+    fn from(list: T) -> Self {
+        Self { vars: list.into() }
+    }
+}
+
+impl From<VarDecl> for VarDeclList {
+    fn from(decl: VarDecl) -> Self {
+        Self {
+            vars: Box::new([decl]),
+        }
+    }
+}
+
+impl AsRef<[VarDecl]> for VarDeclList {
+    fn as_ref(&self) -> &[VarDecl] {
+        &self.vars
+    }
+}
+
+impl fmt::Display for VarDeclList {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if !self.vars.is_empty() {
+            write!(f, "var ")?;
+            join_nodes(f, &self.vars)
+        } else {
+            Ok(())
+        }
+    }
+}
+
+impl<T> From<T> for Node
+where
+    T: Into<VarDeclList>,
+{
+    fn from(list: T) -> Self {
+        Self::VarDeclList(list.into())
+    }
+}
+
+/// Individual variable declaration.
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[derive(Clone, Debug, Trace, Finalize, PartialEq)]
+pub struct VarDecl {
+    name: Box<str>,
+    init: Option<Node>,
+}
+
+impl fmt::Display for VarDecl {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&self.name, f)?;
+        if let Some(ref init) = self.init {
+            write!(f, " = {}", init)?;
+        }
+        Ok(())
+    }
+}
+
+impl VarDecl {
+    /// Creates a new variable declaration.
+    pub(crate) fn new<N, I>(name: N, init: I) -> Self
+    where
+        N: Into<Box<str>>,
+        I: Into<Option<Node>>,
+    {
+        Self {
+            name: name.into(),
+            init: init.into(),
+        }
+    }
+
+    /// Gets the name of the variable.
+    pub fn name(&self) -> &str {
+        &self.name
+    }
+
+    /// Gets the initialization node for the variable, if any.
+ pub fn init(&self) -> Option<&Node> { + self.init.as_ref() + } +} + +/// The `function` expression defines a function with the specified parameters. +/// +/// A function created with a function expression is a `Function` object and has all the +/// properties, methods and behavior of `Function`. +/// +/// A function can also be created using a declaration (see function expression). +/// +/// By default, functions return `undefined`. To return any other value, the function must have +/// a return statement that specifies the value to return. +/// +/// More information: +/// - [ECMAScript reference][spec] +/// - [MDN documentation][mdn] +/// +/// [spec]: https://tc39.es/ecma262/#sec-terms-and-definitions-function +/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Clone, Debug, Trace, Finalize, PartialEq)] +pub struct FunctionExpr { + #[cfg_attr(feature = "serde", serde(flatten))] + inner: FunctionDecl, +} + +impl FunctionExpr { + /// Creates a new function expression + pub(crate) fn new(name: N, parameters: P, body: B) -> Self + where + N: Into>>, + P: Into>, + B: Into, + { + Self { + inner: FunctionDecl::new(name, parameters, body), + } + } + + /// Gets the name of the function declaration. + pub fn name(&self) -> Option<&str> { + self.inner.name() + } + + /// Gets the list of parameters of the function declaration. + pub fn parameters(&self) -> &[FormalParameter] { + self.inner.parameters() + } + + /// Gets the body of the function declaration. + pub fn body(&self) -> &[Node] { + self.inner.body() + } + + /// Implements the display formatting with indentation. 
+ pub(super) fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result { + self.inner.display(f, indentation) + } +} + +impl fmt::Display for FunctionExpr { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.display(f, 0) + } +} + +impl From for Node { + fn from(expr: FunctionExpr) -> Self { + Self::FunctionExpr(expr) + } +} + +/// The `function` declaration (function statement) defines a function with the specified +/// parameters. +/// +/// A function created with a function declaration is a `Function` object and has all the +/// properties, methods and behavior of `Function`. +/// +/// A function can also be created using an expression (see [function expression][func_expr]). +/// +/// By default, functions return `undefined`. To return any other value, the function must have +/// a return statement that specifies the value to return. +/// +/// More information: +/// - [ECMAScript reference][spec] +/// - [MDN documentation][mdn] +/// +/// [spec]: https://tc39.es/ecma262/#sec-terms-and-definitions-function +/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function +/// [func_expr]: ../enum.Node.html#variant.FunctionExpr +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Clone, Debug, Trace, Finalize, PartialEq)] +pub struct FunctionDecl { + name: Option>, + parameters: Box<[FormalParameter]>, + body: StatementList, +} + +impl FunctionDecl { + /// Creates a new function declaration. + pub(crate) fn new(name: N, parameters: P, body: B) -> Self + where + N: Into>>, + P: Into>, + B: Into, + { + Self { + name: name.into(), + parameters: parameters.into(), + body: body.into(), + } + } + + /// Gets the name of the function declaration. + pub fn name(&self) -> Option<&str> { + self.name.as_ref().map(Box::as_ref) + } + + /// Gets the list of parameters of the function declaration. 
+ pub fn parameters(&self) -> &[FormalParameter] { + &self.parameters + } + + /// Gets the body of the function declaration. + pub fn body(&self) -> &[Node] { + self.body.statements() + } + + /// Implements the display formatting with indentation. + pub(super) fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result { + f.write_str("function")?; + if let Some(ref name) = self.name { + write!(f, " {}", name); + } + f.write_str("(")?; + join_nodes(f, &self.parameters)?; + f.write_str(") {{")?; + + self.body.display(f, indentation + 1); + + writeln!(f, "}}") + } +} + +impl From for Node { + fn from(decl: FunctionDecl) -> Self { + Self::FunctionDecl(decl) + } +} + +impl fmt::Display for FunctionDecl { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.display(f, 0) + } +} + +/// An arrow function expression is a syntactically compact alternative to a regular function +/// expression. +/// +/// Arrow function expressions are ill suited as methods, and they cannot be used as +/// constructors. Arrow functions cannot be used as constructors and will throw an error when +/// used with new. +/// +/// More information: +/// - [ECMAScript reference][spec] +/// - [MDN documentation][mdn] +/// +/// [spec]: https://tc39.es/ecma262/#prod-ArrowFunction +/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Clone, Debug, Trace, Finalize, PartialEq)] +pub struct ArrowFunctionDecl { + params: Box<[FormalParameter]>, + body: StatementList, +} + +impl ArrowFunctionDecl { + /// Creates a new `ArrowFunctionDecl` AST node. + pub(crate) fn new(params: P, body: B) -> Self + where + P: Into>, + B: Into, + { + Self { + params: params.into(), + body: body.into(), + } + } + + /// Gets the list of parameters of the arrow function. 
+ pub(crate) fn params(&self) -> &[FormalParameter] { + &self.params + } + + /// Gets the body of the arrow function. + pub(crate) fn body(&self) -> &[Node] { + &self.body.statements() + } + + /// Implements the display formatting with indentation. + pub(super) fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result { + write!(f, "(")?; + join_nodes(f, &self.params)?; + f.write_str(") => ")?; + self.body.display(f, indentation) + } +} + +impl fmt::Display for ArrowFunctionDecl { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.display(f, 0) + } +} + +impl From for Node { + fn from(decl: ArrowFunctionDecl) -> Self { + Self::ArrowFunctionDecl(decl) + } +} diff --git a/boa/src/syntax/ast/node/mod.rs b/boa/src/syntax/ast/node/mod.rs index d5bd69b2589..dfd2652e100 100644 --- a/boa/src/syntax/ast/node/mod.rs +++ b/boa/src/syntax/ast/node/mod.rs @@ -1,26 +1,30 @@ //! This module implements the `Node` structure, which composes the AST. pub mod array; -pub mod arrow_function; pub mod block; +pub mod declaration; pub mod local; pub mod operator; pub mod statement_list; pub use self::{ array::ArrayDecl, - arrow_function::ArrowFunctionDecl, block::Block, + declaration::{ArrowFunctionDecl, FunctionDecl, FunctionExpr, VarDecl, VarDeclList}, local::Local, operator::{Assign, BinOp}, - statement_list::{StatementList, VarDecl}, + statement_list::StatementList, }; use crate::syntax::ast::{ constant::Const, op::{Operator, UnaryOp}, }; use gc::{Finalize, Trace}; -use std::fmt::{self, Display}; +use once_cell::sync::OnceCell; +use std::{ + cmp::Ordering, + fmt::{self, Display}, +}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -153,23 +157,11 @@ pub enum Node { /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/do...while DoWhileLoop(Box, Box), - /// The `function` expression defines a function with the specified parameters. 
- /// - /// A function created with a function expression is a `Function` object and has all the - /// properties, methods and behavior of `Function`. - /// - /// A function can also be created using a declaration (see function expression). - /// - /// By default, functions return `undefined`. To return any other value, the function must have - /// a return statement that specifies the value to return. - /// - /// More information: - /// - [ECMAScript reference][spec] - /// - [MDN documentation][mdn] - /// - /// [spec]: https://tc39.es/ecma262/#sec-terms-and-definitions-function - /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function - FunctionExpr(Option>, Box<[FormalParameter]>, Box), + /// A function declaration node. [More information](./declaration/struct.FunctionDecl.html). + FunctionDecl(FunctionDecl), + + /// A function expressino node. [More information](./declaration/struct.FunctionExpr.html) + FunctionExpr(FunctionExpr), /// This property accessor provides access to an object's properties by using the /// [dot notation][mdn]. @@ -439,6 +431,9 @@ pub enum Node { /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Expressions_and_Operators#Unary_operators UnaryOp(UnaryOp, Box), + /// Array declaration node. [More information](./declaration/struct.VarDeclList.html). + VarDeclList(VarDeclList), + /// The `while` statement creates a loop that executes a specified statement as long as the /// test condition evaluates to `true`. /// @@ -488,7 +483,28 @@ impl Display for Node { } } +impl AsRef for Node { + fn as_ref(&self) -> &Self { + &self + } +} + impl Node { + /// Returns a node ordering based on the hoistability of each node. 
+ pub(crate) fn hoistable_order(a: &Node, b: &Node) -> Ordering { + match (a, b) { + (Node::FunctionDecl(_), Node::FunctionDecl(_)) => Ordering::Equal, + (_, Node::FunctionDecl(_)) => Ordering::Greater, + (Node::FunctionDecl(_), _) => Ordering::Less, + + (Node::VarDeclList(_), Node::VarDeclList(_)) => Ordering::Equal, + (_, Node::VarDeclList(_)) => Ordering::Greater, + (Node::VarDeclList(_), _) => Ordering::Less, + + (_, _) => Ordering::Equal, + } + } + /// Creates a `Break` AST node. pub fn break_node(label: OL) -> Self where @@ -551,17 +567,6 @@ impl Node { Self::DoWhileLoop(body.into(), condition.into()) } - /// Creates a `FunctionDecl` AST node. - pub fn function_expr(name: ON, params: P, body: B) -> Self - where - N: Into>, - ON: Into>, - P: Into>, - B: Into>, - { - Self::FunctionExpr(name.into().map(N::into), params.into(), body.into()) - } - /// Creates a `GetConstField` AST node. pub fn get_const_field(value: V, label: L) -> Self where @@ -708,14 +713,6 @@ impl Node { Self::UnaryOp(op, val.into()) } - // /// Creates a `VarDecl` AST node. - // pub fn var_decl(init: I) -> Self - // where - // I: Into, Option)]>>, - // { - // Self::VarDecl(init.into()) - // } - /// Creates a `WhileLoop` AST node. pub fn while_loop(condition: C, body: B) -> Self where @@ -725,6 +722,18 @@ impl Node { Self::WhileLoop(condition.into(), body.into()) } + /// Gets the lexically declared names. + /// + /// More information: + /// + pub(crate) fn lexically_declared_names(&self) -> &[Box] { + static LIST: OnceCell]>> = OnceCell::new(); + + LIST.get_or_init(|| match *self { + _ => unimplemented!(), + }) + } + /// Implements the display formatting with indentation. 
fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result { let indent = " ".repeat(indentation); @@ -846,16 +855,9 @@ impl Node { f.write_str("}") } Self::ArrayDecl(ref arr) => Display::fmt(arr, f), - Self::FunctionExpr(ref name, ref args, ref node) => { - write!(f, "function ")?; - if let Some(func_name) = name { - write!(f, "{}", func_name)?; - } - write!(f, "{{")?; - join_nodes(f, args)?; - f.write_str("} ")?; - node.display(f, indentation + 1) - } + Self::VarDeclList(ref list) => Display::fmt(list, f), + Self::FunctionDecl(ref decl) => decl.display(f, indentation), + Self::FunctionExpr(ref expr) => expr.display(f, indentation), Self::ArrowFunctionDecl(ref decl) => decl.display(f, indentation), Self::BinOp(ref op) => Display::fmt(op, f), Self::UnaryOp(ref op, ref a) => write!(f, "{}{}", op, a), @@ -1010,7 +1012,7 @@ pub enum PropertyDefinition { /// /// [spec]: https://tc39.es/ecma262/#prod-MethodDefinition /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Object_initializer#Method_definitions - MethodDefinition(MethodDefinitionKind, Box, Node), + MethodDefinition(MethodDefinitionKind, Box, FunctionExpr), /// The Rest/Spread Properties for ECMAScript proposal (stage 4) adds spread properties to object literals. /// It copies own enumerable properties from a provided object onto a new object. @@ -1045,12 +1047,11 @@ impl PropertyDefinition { } /// Creates a `MethodDefinition`. - pub fn method_definition(kind: MethodDefinitionKind, name: N, body: B) -> Self + pub fn method_definition(kind: MethodDefinitionKind, name: N, body: FunctionExpr) -> Self where N: Into>, - B: Into, { - Self::MethodDefinition(kind, name.into(), body.into()) + Self::MethodDefinition(kind, name.into(), body) } /// Creates a `SpreadObject`. 
diff --git a/boa/src/syntax/ast/node/statement_list.rs b/boa/src/syntax/ast/node/statement_list.rs index cc2d818c8f1..b302d6cba88 100644 --- a/boa/src/syntax/ast/node/statement_list.rs +++ b/boa/src/syntax/ast/node/statement_list.rs @@ -1,7 +1,8 @@ //! Statement list node. -use super::{join_nodes, FormalParameter, Node}; +use super::Node; use gc::{Finalize, Trace}; +use once_cell::sync::OnceCell; use std::fmt; #[cfg(feature = "serde")] @@ -18,31 +19,36 @@ use serde::{Deserialize, Serialize}; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Debug, Trace, Finalize, PartialEq)] pub struct StatementList { - functions: Box<[FunctionDecl]>, + #[cfg_attr(feature = "serde", serde(flatten))] statements: Box<[Node]>, } impl StatementList { - /// Creates a new statement list. - pub(crate) fn new(functions: F, statements: S) -> Self - where - F: Into>, - S: Into>, - { - Self { - functions: functions.into(), - statements: statements.into(), - } + /// Gets the list of statements. + pub fn statements(&self) -> &[Node] { + &self.statements + } + + /// Gets the lexically declared names. + /// + /// More information: + /// + pub(crate) fn lexically_declared_names(&self) -> &[Box] { + static LIST: OnceCell]>> = OnceCell::new(); + + LIST.get_or_init(|| { + self.statements + .iter() + .map(|node| node.lexically_declared_names()) + .flatten() + .cloned() + .collect::>() + .into_boxed_slice() + }) } /// Implements the display formatting with indentation. pub(super) fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result { - // Print the functions first. 
- for function in self.functions.iter() { - function.display(f, indentation)?; - writeln!(f); - } - // Print statements for node in self.statements.iter() { node.display(f, indentation + 1)?; @@ -60,91 +66,19 @@ impl StatementList { } } -impl fmt::Display for StatementList { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.display(f, 0) - } -} - -/// The `var` statement declares a variable, optionally initializing it to a value. -/// -/// var declarations, wherever they occur, are processed before any code is executed. This is -/// called hoisting, and is discussed further below. -/// -/// The scope of a variable declared with var is its current execution context, which is either -/// the enclosing function or, for variables declared outside any function, global. If you -/// re-declare a JavaScript variable, it will not lose its value. -/// -/// Assigning a value to an undeclared variable implicitly creates it as a global variable (it -/// becomes a property of the global object) when the assignment is executed. -/// -/// More information: -/// - [ECMAScript reference][spec] -/// - [MDN documentation][mdn] -/// -/// [spec]: https://tc39.es/ecma262/#prod-VariableStatement -/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/var -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Clone, Debug, Trace, Finalize, PartialEq)] -pub struct VarDecl { - vars: Box<[Box]>, -} - -impl From for VarDecl +impl From for StatementList where - T: Into]>>, + T: Into>, { - fn from(list: T) -> Self { - Self { vars: list.into() } - } -} - -impl fmt::Display for VarDecl { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if !self.vars.is_empty() { - write!(f, "var ")?; - join_nodes(f, &self.vars) - } else { - Ok(()) + fn from(stm: T) -> Self { + Self { + statements: stm.into(), } } } -/// The `function` declaration (function statement) defines a function with the specified -/// parameters. 
-/// -/// A function created with a function declaration is a `Function` object and has all the -/// properties, methods and behavior of `Function`. -/// -/// A function can also be created using an expression (see [function expression][func_expr]). -/// -/// By default, functions return `undefined`. To return any other value, the function must have -/// a return statement that specifies the value to return. -/// -/// More information: -/// - [ECMAScript reference][spec] -/// - [MDN documentation][mdn] -/// -/// [spec]: https://tc39.es/ecma262/#sec-terms-and-definitions-function -/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function -/// [func_expr]: ../enum.Node.html#variant.FunctionExpr -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Clone, Debug, Trace, Finalize, PartialEq)] -pub struct FunctionDecl { - name: Box, - parameters: Box<[FormalParameter]>, - body: StatementList, -} - -impl FunctionDecl { - /// Implements the display formatting with indentation. - fn display(&self, f: &mut fmt::Formatter<'_>, indentation: usize) -> fmt::Result { - write!(f, "function {} (", self.name)?; - join_nodes(f, &self.parameters)?; - f.write_str(") {{")?; - - self.body.display(f, indentation + 1); - - writeln!(f, "}}") +impl fmt::Display for StatementList { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.display(f, 0) } } diff --git a/boa/src/syntax/ast/pos.rs b/boa/src/syntax/ast/pos.rs deleted file mode 100644 index a77d50e8e8c..00000000000 --- a/boa/src/syntax/ast/pos.rs +++ /dev/null @@ -1,36 +0,0 @@ -//! This module implements the `Pos` structure, which represents a position in the source code. - -#[cfg(feature = "serde")] -use serde::{Deserialize, Serialize}; - -/// A position in the Javascript source code. 
-/// -/// Stores both the column number and the line number -/// -/// ## Similar Implementations -/// [V8: Location](https://cs.chromium.org/chromium/src/v8/src/parsing/scanner.h?type=cs&q=isValid+Location&g=0&l=216) -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -#[derive(Clone, Copy, PartialEq, Debug)] -pub struct Position { - // Column number - pub column_number: u64, - // Line number - pub line_number: u64, -} - -impl Position { - /// Creates a new `Position`. - /// - /// Positions are usually created by a [`Token`](struct.token/Token.html). - /// - /// # Arguments - /// - /// * `line_number` - The line number the token starts at - /// * `column_number` - The column number the token starts at - pub fn new(line_number: u64, column_number: u64) -> Self { - Self { - line_number, - column_number, - } - } -} diff --git a/boa/src/syntax/ast/position.rs b/boa/src/syntax/ast/position.rs new file mode 100644 index 00000000000..329485af4be --- /dev/null +++ b/boa/src/syntax/ast/position.rs @@ -0,0 +1,277 @@ +//! This module implements the `Pos` structure, which represents a position in the source code. + +use std::{cmp::Ordering, fmt}; + +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; + +/// A position in the JavaScript source code. +/// +/// Stores both the column number and the line number +/// +/// ## Similar Implementations +/// [V8: Location](https://cs.chromium.org/chromium/src/v8/src/parsing/scanner.h?type=cs&q=isValid+Location&g=0&l=216) +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct Position { + /// Line number. + line_number: u64, + /// Column number. + column_number: u64, +} + +impl Position { + /// Creates a new `Position`. + #[inline] + pub fn new(line_number: u64, column_number: u64) -> Self { + Self { + line_number, + column_number, + } + } + + /// Gets the line number of the position. 
+ #[inline] + pub fn line_number(self) -> u64 { + self.line_number + } + + /// Gets the column number of the position. + #[inline] + pub fn column_number(self) -> u64 { + self.column_number + } +} + +impl fmt::Display for Position { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}:{}", self.line_number, self.column_number) + } +} + +/// A span in the JavaScript source code. +/// +/// Stores a start position and an end position. +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct Span { + start: Position, + end: Position, +} + +impl Span { + /// Creates a new `Span`. + #[inline] + pub fn new(start: Position, end: Position) -> Self { + assert!(start <= end, "a span cannot start after its end"); + + Self { start, end } + } + + /// Gets the starting position of the span. + #[inline] + pub fn start(self) -> Position { + self.start + } + + /// Gets the final position of the span. + #[inline] + pub fn end(self) -> Position { + self.end + } + + /// Checks if this span inclusively contains another span or position. + #[inline] + pub fn contains(self, other: S) -> bool + where + S: Into, + { + let other = other.into(); + self.start <= other.start && self.end >= other.end + } +} + +impl From for Span { + fn from(pos: Position) -> Self { + Self { + start: pos, + end: pos, + } + } +} + +impl PartialOrd for Span { + fn partial_cmp(&self, other: &Self) -> Option { + if self == other { + Some(Ordering::Equal) + } else if self.end < other.start { + Some(Ordering::Less) + } else if self.start > other.end { + Some(Ordering::Greater) + } else { + None + } + } +} + +impl fmt::Display for Span { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "[{}..{}]", self.start, self.end) + } +} + +#[cfg(test)] +mod tests { + use super::{Position, Span}; + + /// Checks that the `PartialEq` implementation of `Position` is consistent. 
+ #[test] + fn position_equality() { + assert_eq!(Position::new(10, 50), Position::new(10, 50)); + assert_ne!(Position::new(10, 50), Position::new(10, 51)); + assert_ne!(Position::new(10, 50), Position::new(11, 50)); + assert_ne!(Position::new(10, 50), Position::new(11, 51)); + } + + /// Checks that the `PartialOrd` implementation of `Position` is consistent. + #[test] + fn position_order() { + assert!(Position::new(10, 50) < Position::new(10, 51)); + assert!(Position::new(9, 50) < Position::new(10, 50)); + assert!(Position::new(10, 50) < Position::new(11, 51)); + assert!(Position::new(10, 50) < Position::new(11, 49)); + + assert!(Position::new(10, 51) > Position::new(10, 50)); + assert!(Position::new(10, 50) > Position::new(9, 50)); + assert!(Position::new(11, 51) > Position::new(10, 50)); + assert!(Position::new(11, 49) > Position::new(10, 50)); + } + + /// Checks that the position getters actually retreive correct values. + #[test] + fn position_getters() { + let pos = Position::new(10, 50); + assert_eq!(pos.line_number(), 10); + assert_eq!(pos.column_number(), 50); + } + + /// Checks that the string representation of a position is correct. + #[test] + fn position_to_string() { + let pos = Position::new(10, 50); + + assert_eq!("10:50", pos.to_string()); + assert_eq!("10:50", format!("{}", pos)); + } + + /// Checks that we cannot create an invalid span. + #[test] + #[should_panic] + fn invalid_span() { + let a = Position::new(10, 30); + let b = Position::new(10, 50); + Span::new(b, a); + } + + /// Checks that we can create valid spans. + #[test] + fn span_creation() { + let a = Position::new(10, 30); + let b = Position::new(10, 50); + + let _ = Span::new(a, b); + let _ = Span::new(a, a); + let _ = Span::from(a); + } + + /// Checks that the `PartialEq` implementation of `Span` is consistent. 
+ #[test] + fn span_equality() { + let a = Position::new(10, 50); + let b = Position::new(10, 52); + let c = Position::new(11, 20); + + let span_ab = Span::new(a, b); + let span_ab_2 = Span::new(a, b); + let span_ac = Span::new(a, c); + let span_bc = Span::new(b, c); + + assert_eq!(span_ab, span_ab_2); + assert_ne!(span_ab, span_ac); + assert_ne!(span_ab, span_bc); + assert_ne!(span_bc, span_ac); + + let span_a = Span::from(a); + let span_aa = Span::new(a, a); + + assert_eq!(span_a, span_aa); + } + + /// Checks that the getters retrieve the correct value. + #[test] + fn span_getters() { + let a = Position::new(10, 50); + let b = Position::new(10, 52); + + let span = Span::new(a, b); + + assert_eq!(span.start(), a); + assert_eq!(span.end(), b); + } + + /// Checks that the `Span::contains()` method works properly. + #[test] + fn span_contains() { + let a = Position::new(10, 50); + let b = Position::new(10, 52); + let c = Position::new(11, 20); + let d = Position::new(12, 5); + + let span_ac = Span::new(a, c); + assert!(span_ac.contains(b)); + + let span_ab = Span::new(a, b); + let span_dc = Span::new(d, c); + + assert!(!span_ab.contains(span_dc)); + assert!(span_ab.contains(b)); + + let span_ad = Span::new(a, d); + let span_bc = Span::new(b, c); + + assert!(span_ad.contains(span_bc)); + assert!(!span_bc.contains(span_ad)); + + let span_ac = Span::new(a, c); + let span_bd = Span::new(b, d); + + assert!(!span_ac.contains(span_bd)); + assert!(!span_bd.contains(span_ac)); + } + + /// Checks that the string representation of a span is correct. + #[test] + fn span_to_string() { + let a = Position::new(10, 50); + let b = Position::new(11, 20); + let span = Span::new(a, b); + + assert_eq!("[10:50..11:20]", span.to_string()); + assert_eq!("[10:50..11:20]", format!("{}", span)); + } + + /// Checks that the ordering of spans is correct. 
+ #[test] + fn span_ordering() { + let a = Position::new(10, 50); + let b = Position::new(10, 52); + let c = Position::new(11, 20); + let d = Position::new(12, 5); + + let span_ab = Span::new(a, b); + let span_dc = Span::new(d, c); + + assert!(span_ab < span_dc); + assert!(span_dc > span_ab); + } +} diff --git a/boa/src/syntax/ast/punc.rs b/boa/src/syntax/ast/punctuator.rs similarity index 100% rename from boa/src/syntax/ast/punc.rs rename to boa/src/syntax/ast/punctuator.rs diff --git a/boa/src/syntax/ast/token.rs b/boa/src/syntax/ast/token.rs index 0df76ac7738..037a17d40da 100644 --- a/boa/src/syntax/ast/token.rs +++ b/boa/src/syntax/ast/token.rs @@ -5,7 +5,7 @@ //! //! [spec]: https://tc39.es/ecma262/#sec-tokens -use crate::syntax::ast::{keyword::Keyword, pos::Position, punc::Punctuator}; +use crate::syntax::ast::{Keyword, Punctuator, Span}; use std::fmt::{Debug, Display, Formatter, Result}; #[cfg(feature = "serde")] @@ -21,19 +21,24 @@ use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, PartialEq)] pub struct Token { /// The token kind, which contains the actual data of the token. - pub kind: TokenKind, - - /// The token position from origina source code. - pub pos: Position, + kind: TokenKind, + /// The token position in the original source code. + span: Span, } impl Token { /// Create a new detailed token from the token data, line number and column number - pub fn new(kind: TokenKind, line_number: u64, column_number: u64) -> Self { - Self { - kind, - pos: Position::new(line_number, column_number), - } + pub fn new(kind: TokenKind, span: Span) -> Self { + Self { kind, span } + } + + pub fn kind(&self) -> &TokenKind { + &self.kind + } + + /// Gets the token span in the original source code. 
+ pub fn span(&self) -> Span { + self.span } } diff --git a/boa/src/syntax/lexer/mod.rs b/boa/src/syntax/lexer/mod.rs index cda968f4e15..ac035284279 100644 --- a/boa/src/syntax/lexer/mod.rs +++ b/boa/src/syntax/lexer/mod.rs @@ -7,8 +7,8 @@ mod tests; use crate::syntax::ast::{ - punc::Punctuator, token::{NumericLiteral, Token, TokenKind}, + Position, Punctuator, Span, }; use std::{ char::{decode_utf16, from_u32}, @@ -26,7 +26,7 @@ macro_rules! vop { match preview { '=' => { $this.next(); - $this.column_number += 1; + $this.next_column(); $assign_op } _ => $op, @@ -37,12 +37,12 @@ macro_rules! vop { match preview { '=' => { $this.next(); - $this.column_number += 1; + $this.next_column(); $assign_op }, $($case => { $this.next(); - $this.column_number += 1; + $this.next_column(); $block })+, _ => $op @@ -53,7 +53,7 @@ macro_rules! vop { match preview { $($case => { $this.next()?; - $this.column_number += 1; + $this.next_column(); $block })+, _ => $op @@ -64,17 +64,19 @@ macro_rules! vop { /// The `op` macro handles binary operations or assignment operations and converts them into tokens. macro_rules! op { ($this:ident, $assign_op:expr, $op:expr) => ({ + let start_pos = $this.position; let punc = vop!($this, $assign_op, $op); - $this.push_punc(punc); + $this.push_punc(punc, start_pos); }); ($this:ident, $assign_op:expr, $op:expr, {$($case:pat => $block:expr),+}) => ({ + let start_pos = $this.position; let punc = vop!($this, $assign_op, $op, {$($case => $block),+}); - $this.push_punc(punc); - }); - ($this:ident, $op:expr, {$($case:pat => $block:expr),+}) => ({ - let punc = vop!($this, $op, {$($case => $block),+}); - $this.push_punc(); + $this.push_punc(punc, start_pos); }); + // ($this:ident, $op:expr, {$($case:pat => $block:expr),+}) => ({ + // let punc = vop!($this, $op, {$($case => $block),+}); + // $this.push_punc(); + // }); } /// An error that occurred during lexing or compiling of the source input. 
@@ -121,10 +123,8 @@ pub struct Lexer<'a> { /// /// This field is public so you can use them once lexing has finished. pub tokens: Vec, - /// The current line number in the script - line_number: u64, - /// the current column number in the script - column_number: u64, + /// The current position in the source code. + position: Position, /// The full Peekable buffer, an array of [Char]s buffer: Peekable>, } @@ -136,21 +136,55 @@ impl<'a> Lexer<'a> { pub fn new(buffer: &'a str) -> Lexer<'a> { Lexer { tokens: Vec::new(), - line_number: 1, - column_number: 0, + position: Position::new(1, 1), buffer: buffer.chars().peekable(), } } /// Push a token onto the token queue. - fn push_token(&mut self, tk: TokenKind) { + fn push_token(&mut self, tk: TokenKind, start: Position) { self.tokens - .push(Token::new(tk, self.line_number, self.column_number)) + .push(Token::new(tk, Span::new(start, self.position))) } /// Push a punctuation token - fn push_punc(&mut self, punc: Punctuator) { - self.push_token(TokenKind::Punctuator(punc)); + fn push_punc(&mut self, punc: Punctuator, start: Position) { + self.push_token(TokenKind::Punctuator(punc), start); + } + + /// Changes the current position by advancing to the next column. + fn next_column(&mut self) { + let pos = Position::new( + self.position.line_number(), + self.position.column_number() + 1, + ); + self.position = pos; + } + + /// Changes the current position by advancing the given number of columns. + fn move_columns(&mut self, columns: u64) { + let pos = Position::new( + self.position.line_number(), + self.position.column_number() + columns, + ); + self.position = pos; + } + + fn carriage_return(&mut self) { + let pos = Position::new(self.position.line_number(), 0); + self.position = pos; + } + + /// Changes the current position by advancing to the next line. 
+ fn next_line(&mut self) { + let pos = Position::new(self.position.line_number() + 1, 1); + self.position = pos; + } + + /// Changes the current position by advancing the given number of lines. + fn move_lines(&mut self, lines: u64) { + let pos = Position::new(self.position.line_number() + lines, 1); + self.position = pos; } /// next fetches the next token and return it, or a LexerError if there are no more. @@ -266,11 +300,15 @@ impl<'a> Lexer<'a> { let mut buf = ch.to_string(); let mut position_offset = 0; let mut kind = NumericKind::Integer(10); + let start_pos = self.position; if ch == '0' { match self.preview_next() { None => { - self.push_token(TokenKind::NumericLiteral(NumericLiteral::Integer(0))); - self.column_number += 1; + self.push_token( + TokenKind::NumericLiteral(NumericLiteral::Integer(0)), + start_pos, + ); + self.next_column(); return Ok(()); } Some('x') | Some('X') => { @@ -425,8 +463,8 @@ impl<'a> Lexer<'a> { } }; - self.push_token(TokenKind::NumericLiteral(num)); - self.column_number += (buf.len() as u64) + position_offset - 1; + self.push_token(TokenKind::NumericLiteral(num), start_pos); + self.move_columns((buf.len() as u64) + position_offset - 1); Ok(()) } @@ -449,7 +487,8 @@ impl<'a> Lexer<'a> { if self.preview_next().is_none() { return Ok(()); } - self.column_number += 1; + self.next_column(); + let start_pos = self.position; let ch = self.next(); match ch { '"' | '\'' => { @@ -486,7 +525,7 @@ impl<'a> Lexer<'a> { } nums.push(self.next()); } - self.column_number += 2; + self.move_columns(2); let as_num = match u64::from_str_radix(&nums, 16) { Ok(v) => v, Err(_) => 0, @@ -494,8 +533,8 @@ impl<'a> Lexer<'a> { match from_u32(as_num as u32) { Some(v) => v, None => panic!( - "{}:{}: {} is not a valid unicode scalar value", - self.line_number, self.column_number, as_num + "{}: {} is not a valid unicode scalar value", + self.position, as_num ), } } @@ -522,8 +561,7 @@ impl<'a> Lexer<'a> { return Err(LexerError::new("Unterminated String")); } 
self.next(); // '}' - self.column_number += - (s.len() as u64).wrapping_add(3); + self.move_columns((s.len() as u64).wrapping_add(3)); c } else { let mut codepoints: Vec = vec![]; @@ -540,8 +578,7 @@ impl<'a> Lexer<'a> { }; codepoints.push(as_num); - self.column_number += - (s.len() as u64).wrapping_add(2); + self.move_columns((s.len() as u64).wrapping_add(2)); // Check for another UTF-16 codepoint if self.next_is('\\') && self.next_is('u') { @@ -560,7 +597,7 @@ impl<'a> Lexer<'a> { } '\'' | '"' | '\\' => escape, ch => { - let details = format!("{}:{}: Invalid escape `{}`", self.line_number, self.column_number, ch); + let details = format!("{}: Invalid escape `{}`", self.position, ch); return Err(LexerError { details }); } }; @@ -571,11 +608,11 @@ impl<'a> Lexer<'a> { } } let str_length = buf.len() as u64; - self.push_token(TokenKind::StringLiteral(buf)); // Why +1? Quotation marks are not included, // So technically it would be +2, (for both " ") but we want to be 1 less // to compensate for the incrementing at the top - self.column_number += str_length.wrapping_add(1); + self.move_columns( str_length.wrapping_add(1)); + self.push_token(TokenKind::StringLiteral(buf), start_pos); } _ if ch.is_digit(10) => self.reed_numerical_literal(ch)?, _ if ch.is_alphabetic() || ch == '$' || ch == '_' => { @@ -587,8 +624,7 @@ impl<'a> Lexer<'a> { break; } } - - self.push_token(match buf.as_str() { + let tk = match buf.as_str() { "true" => TokenKind::BooleanLiteral(true), "false" => TokenKind::BooleanLiteral(false), "null" => TokenKind::NullLiteral, @@ -599,33 +635,36 @@ impl<'a> Lexer<'a> { TokenKind::identifier(slice) } } - }); - // Move position forward the length of keyword - self.column_number += (buf.len().wrapping_sub(1)) as u64; + }; + + // Move position forward the length of the token + self.move_columns( (buf.len().wrapping_sub(1)) as u64); + + self.push_token(tk, start_pos); } - ';' => self.push_punc(Punctuator::Semicolon), - ':' => self.push_punc(Punctuator::Colon), + 
';' => self.push_punc(Punctuator::Semicolon, start_pos), + ':' => self.push_punc(Punctuator::Colon, start_pos), '.' => { // . or ... if self.next_is('.') { if self.next_is('.') { - self.push_punc(Punctuator::Spread); - self.column_number += 2; + self.push_punc(Punctuator::Spread, start_pos); + self.move_columns( 2); } else { return Err(LexerError::new("Expecting Token .")); } } else { - self.push_punc(Punctuator::Dot); + self.push_punc(Punctuator::Dot, start_pos); }; } - '(' => self.push_punc(Punctuator::OpenParen), - ')' => self.push_punc(Punctuator::CloseParen), - ',' => self.push_punc(Punctuator::Comma), - '{' => self.push_punc(Punctuator::OpenBlock), - '}' => self.push_punc(Punctuator::CloseBlock), - '[' => self.push_punc(Punctuator::OpenBracket), - ']' => self.push_punc(Punctuator::CloseBracket), - '?' => self.push_punc(Punctuator::Question), + '(' => self.push_punc(Punctuator::OpenParen, start_pos), + ')' => self.push_punc(Punctuator::CloseParen, start_pos), + ',' => self.push_punc(Punctuator::Comma, start_pos), + '{' => self.push_punc(Punctuator::OpenBlock, start_pos), + '}' => self.push_punc(Punctuator::CloseBlock, start_pos), + '[' => self.push_punc(Punctuator::OpenBracket, start_pos), + ']' => self.push_punc(Punctuator::CloseBracket, start_pos), + '?' 
=> self.push_punc(Punctuator::Question, start_pos), // Comments '/' => { if let Some(ch) = self.preview_next() { @@ -637,8 +676,7 @@ impl<'a> Lexer<'a> { break; } } - self.line_number += 1; - self.column_number = 0; + self.next_line() } // block comment '*' => { @@ -660,8 +698,7 @@ impl<'a> Lexer<'a> { }, } } - self.line_number += lines; - self.column_number = 0; + self.move_lines(lines); } // division, assigndiv or regex literal _ => { @@ -672,7 +709,7 @@ impl<'a> Lexer<'a> { let mut body = String::new(); let mut regex = false; loop { - self.column_number +=1; + self.next_column(); match self.buffer.next() { // end of body Some('/') => { @@ -682,14 +719,14 @@ impl<'a> Lexer<'a> { // newline/eof not allowed in regex literal n @ Some('\n') | n @ Some('\r') | n @ Some('\u{2028}') | n @ Some('\u{2029}') => { - self.column_number = 0; + self.carriage_return(); if n != Some('\r') { - self.line_number += 1; + self.next_line(); } break }, None => { - self.column_number -= 1; + self.position = Position::new(self.position.line_number(), self.position.column_number()-1); break } // escape sequence @@ -712,7 +749,7 @@ impl<'a> Lexer<'a> { let flags = self.take_char_while(char::is_alphabetic)?; self.push_token(TokenKind::RegularExpressionLiteral( body, flags, - )); + ), start_pos); } else { // failed to parse regex, restore original buffer position and // parse either div or assigndiv @@ -720,9 +757,9 @@ impl<'a> Lexer<'a> { if self.next_is('=') { self.push_token(TokenKind::Punctuator( Punctuator::AssignDiv, - )); + ), start_pos); } else { - self.push_token(TokenKind::Punctuator(Punctuator::Div)); + self.push_token(TokenKind::Punctuator(Punctuator::Div), start_pos); } } } @@ -772,14 +809,13 @@ impl<'a> Lexer<'a> { vop!(self, Punctuator::StrictNotEq, Punctuator::NotEq), Punctuator::Not ), - '~' => self.push_punc(Punctuator::Neg), + '~' => self.push_punc(Punctuator::Neg, start_pos), '\n' | '\u{2028}' | '\u{2029}' => { - self.push_token(TokenKind::LineTerminator); - 
self.line_number += 1; - self.column_number = 0; + self.next_line(); + self.push_token(TokenKind::LineTerminator, start_pos); } '\r' => { - self.column_number = 0; + self.carriage_return(); } // The rust char::is_whitespace function and the ecma standard use different sets // of characters as whitespaces: @@ -791,7 +827,7 @@ impl<'a> Lexer<'a> { // Unicode Space_Seperator category (minus \u{0020} and \u{00A0} which are allready stated above) '\u{1680}' | '\u{2000}'..='\u{200A}' | '\u{202F}' | '\u{205F}' | '\u{3000}' => (), _ => { - let details = format!("{}:{}: Unexpected '{}'", self.line_number, self.column_number, ch); + let details = format!("{}: Unexpected '{}'", self.position, ch); return Err(LexerError { details }); }, } diff --git a/boa/src/syntax/lexer/tests.rs b/boa/src/syntax/lexer/tests.rs index a50bc3b5b63..855d8b46a30 100644 --- a/boa/src/syntax/lexer/tests.rs +++ b/boa/src/syntax/lexer/tests.rs @@ -2,16 +2,20 @@ #![allow(clippy::indexing_slicing)] use super::*; -use crate::syntax::ast::keyword::Keyword; +use crate::syntax::ast::Keyword; + +fn span(start: (u64, u64), end: (u64, u64)) -> Span { + Span::new(Position::new(start.0, start.1), Position::new(end.0, end.1)) +} #[test] fn check_single_line_comment() { let s1 = "var \n//This is a comment\ntrue"; let mut lexer = Lexer::new(s1); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::Keyword(Keyword::Var)); - assert_eq!(lexer.tokens[1].kind, TokenKind::LineTerminator); - assert_eq!(lexer.tokens[2].kind, TokenKind::BooleanLiteral(true)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::Keyword(Keyword::Var)); + assert_eq!(lexer.tokens[1].kind(), &TokenKind::LineTerminator); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::BooleanLiteral(true)); } #[test] @@ -19,8 +23,8 @@ fn check_multi_line_comment() { let s = "var /* await \n break \n*/ x"; let mut lexer = Lexer::new(s); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, 
TokenKind::Keyword(Keyword::Var)); - assert_eq!(lexer.tokens[1].kind, TokenKind::identifier("x")); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::Keyword(Keyword::Var)); + assert_eq!(lexer.tokens[1].kind(), &TokenKind::identifier("x")); } #[test] @@ -28,9 +32,9 @@ fn check_string() { let s = "'aaa' \"bbb\""; let mut lexer = Lexer::new(s); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::string_literal("aaa")); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::string_literal("aaa")); - assert_eq!(lexer.tokens[1].kind, TokenKind::string_literal("bbb")); + assert_eq!(lexer.tokens[1].kind(), &TokenKind::string_literal("bbb")); } #[test] @@ -42,191 +46,200 @@ fn check_punctuators() { let mut lexer = Lexer::new(s); lexer.lex().expect("failed to lex"); assert_eq!( - lexer.tokens[0].kind, - TokenKind::Punctuator(Punctuator::OpenBlock) + lexer.tokens[0].kind(), + &TokenKind::Punctuator(Punctuator::OpenBlock) + ); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::OpenParen) + ); + assert_eq!( + lexer.tokens[2].kind(), + &TokenKind::Punctuator(Punctuator::CloseParen) + ); + assert_eq!( + lexer.tokens[3].kind(), + &TokenKind::Punctuator(Punctuator::OpenBracket) ); assert_eq!( - lexer.tokens[1].kind, - TokenKind::Punctuator(Punctuator::OpenParen) + lexer.tokens[4].kind(), + &TokenKind::Punctuator(Punctuator::CloseBracket) ); assert_eq!( - lexer.tokens[2].kind, - TokenKind::Punctuator(Punctuator::CloseParen) + lexer.tokens[5].kind(), + &TokenKind::Punctuator(Punctuator::Dot) ); assert_eq!( - lexer.tokens[3].kind, - TokenKind::Punctuator(Punctuator::OpenBracket) + lexer.tokens[6].kind(), + &TokenKind::Punctuator(Punctuator::Spread) ); assert_eq!( - lexer.tokens[4].kind, - TokenKind::Punctuator(Punctuator::CloseBracket) + lexer.tokens[7].kind(), + &TokenKind::Punctuator(Punctuator::Semicolon) ); - assert_eq!(lexer.tokens[5].kind, TokenKind::Punctuator(Punctuator::Dot)); assert_eq!( - lexer.tokens[6].kind, - 
TokenKind::Punctuator(Punctuator::Spread) + lexer.tokens[8].kind(), + &TokenKind::Punctuator(Punctuator::Comma) ); assert_eq!( - lexer.tokens[7].kind, - TokenKind::Punctuator(Punctuator::Semicolon) + lexer.tokens[9].kind(), + &TokenKind::Punctuator(Punctuator::LessThan) ); assert_eq!( - lexer.tokens[8].kind, - TokenKind::Punctuator(Punctuator::Comma) + lexer.tokens[10].kind(), + &TokenKind::Punctuator(Punctuator::GreaterThan) ); assert_eq!( - lexer.tokens[9].kind, - TokenKind::Punctuator(Punctuator::LessThan) + lexer.tokens[11].kind(), + &TokenKind::Punctuator(Punctuator::LessThanOrEq) ); assert_eq!( - lexer.tokens[10].kind, - TokenKind::Punctuator(Punctuator::GreaterThan) + lexer.tokens[12].kind(), + &TokenKind::Punctuator(Punctuator::GreaterThanOrEq) ); assert_eq!( - lexer.tokens[11].kind, - TokenKind::Punctuator(Punctuator::LessThanOrEq) + lexer.tokens[13].kind(), + &TokenKind::Punctuator(Punctuator::Eq) ); assert_eq!( - lexer.tokens[12].kind, - TokenKind::Punctuator(Punctuator::GreaterThanOrEq) + lexer.tokens[14].kind(), + &TokenKind::Punctuator(Punctuator::NotEq) ); - assert_eq!(lexer.tokens[13].kind, TokenKind::Punctuator(Punctuator::Eq)); assert_eq!( - lexer.tokens[14].kind, - TokenKind::Punctuator(Punctuator::NotEq) + lexer.tokens[15].kind(), + &TokenKind::Punctuator(Punctuator::StrictEq) ); assert_eq!( - lexer.tokens[15].kind, - TokenKind::Punctuator(Punctuator::StrictEq) + lexer.tokens[16].kind(), + &TokenKind::Punctuator(Punctuator::StrictNotEq) ); assert_eq!( - lexer.tokens[16].kind, - TokenKind::Punctuator(Punctuator::StrictNotEq) + lexer.tokens[17].kind(), + &TokenKind::Punctuator(Punctuator::Add) ); assert_eq!( - lexer.tokens[17].kind, - TokenKind::Punctuator(Punctuator::Add) + lexer.tokens[18].kind(), + &TokenKind::Punctuator(Punctuator::Sub) ); assert_eq!( - lexer.tokens[18].kind, - TokenKind::Punctuator(Punctuator::Sub) + lexer.tokens[19].kind(), + &TokenKind::Punctuator(Punctuator::Mul) ); assert_eq!( - lexer.tokens[19].kind, - 
TokenKind::Punctuator(Punctuator::Mul) + lexer.tokens[20].kind(), + &TokenKind::Punctuator(Punctuator::Mod) ); assert_eq!( - lexer.tokens[20].kind, - TokenKind::Punctuator(Punctuator::Mod) + lexer.tokens[21].kind(), + &TokenKind::Punctuator(Punctuator::Dec) ); assert_eq!( - lexer.tokens[21].kind, - TokenKind::Punctuator(Punctuator::Dec) + lexer.tokens[22].kind(), + &TokenKind::Punctuator(Punctuator::LeftSh) ); assert_eq!( - lexer.tokens[22].kind, - TokenKind::Punctuator(Punctuator::LeftSh) + lexer.tokens[23].kind(), + &TokenKind::Punctuator(Punctuator::RightSh) ); assert_eq!( - lexer.tokens[23].kind, - TokenKind::Punctuator(Punctuator::RightSh) + lexer.tokens[24].kind(), + &TokenKind::Punctuator(Punctuator::URightSh) ); assert_eq!( - lexer.tokens[24].kind, - TokenKind::Punctuator(Punctuator::URightSh) + lexer.tokens[25].kind(), + &TokenKind::Punctuator(Punctuator::And) ); assert_eq!( - lexer.tokens[25].kind, - TokenKind::Punctuator(Punctuator::And) + lexer.tokens[26].kind(), + &TokenKind::Punctuator(Punctuator::Or) ); - assert_eq!(lexer.tokens[26].kind, TokenKind::Punctuator(Punctuator::Or)); assert_eq!( - lexer.tokens[27].kind, - TokenKind::Punctuator(Punctuator::Xor) + lexer.tokens[27].kind(), + &TokenKind::Punctuator(Punctuator::Xor) ); assert_eq!( - lexer.tokens[28].kind, - TokenKind::Punctuator(Punctuator::Not) + lexer.tokens[28].kind(), + &TokenKind::Punctuator(Punctuator::Not) ); assert_eq!( - lexer.tokens[29].kind, - TokenKind::Punctuator(Punctuator::Neg) + lexer.tokens[29].kind(), + &TokenKind::Punctuator(Punctuator::Neg) ); assert_eq!( - lexer.tokens[30].kind, - TokenKind::Punctuator(Punctuator::BoolAnd) + lexer.tokens[30].kind(), + &TokenKind::Punctuator(Punctuator::BoolAnd) ); assert_eq!( - lexer.tokens[31].kind, - TokenKind::Punctuator(Punctuator::BoolOr) + lexer.tokens[31].kind(), + &TokenKind::Punctuator(Punctuator::BoolOr) ); assert_eq!( - lexer.tokens[32].kind, - TokenKind::Punctuator(Punctuator::Question) + lexer.tokens[32].kind(), + 
&TokenKind::Punctuator(Punctuator::Question) ); assert_eq!( - lexer.tokens[33].kind, - TokenKind::Punctuator(Punctuator::Colon) + lexer.tokens[33].kind(), + &TokenKind::Punctuator(Punctuator::Colon) ); assert_eq!( - lexer.tokens[34].kind, - TokenKind::Punctuator(Punctuator::Assign) + lexer.tokens[34].kind(), + &TokenKind::Punctuator(Punctuator::Assign) ); assert_eq!( - lexer.tokens[35].kind, - TokenKind::Punctuator(Punctuator::AssignAdd) + lexer.tokens[35].kind(), + &TokenKind::Punctuator(Punctuator::AssignAdd) ); assert_eq!( - lexer.tokens[36].kind, - TokenKind::Punctuator(Punctuator::AssignSub) + lexer.tokens[36].kind(), + &TokenKind::Punctuator(Punctuator::AssignSub) ); assert_eq!( - lexer.tokens[37].kind, - TokenKind::Punctuator(Punctuator::AssignMul) + lexer.tokens[37].kind(), + &TokenKind::Punctuator(Punctuator::AssignMul) ); assert_eq!( - lexer.tokens[38].kind, - TokenKind::Punctuator(Punctuator::AssignAnd) + lexer.tokens[38].kind(), + &TokenKind::Punctuator(Punctuator::AssignAnd) ); assert_eq!( - lexer.tokens[39].kind, - TokenKind::Punctuator(Punctuator::AssignPow) + lexer.tokens[39].kind(), + &TokenKind::Punctuator(Punctuator::AssignPow) ); assert_eq!( - lexer.tokens[40].kind, - TokenKind::Punctuator(Punctuator::Inc) + lexer.tokens[40].kind(), + &TokenKind::Punctuator(Punctuator::Inc) ); assert_eq!( - lexer.tokens[41].kind, - TokenKind::Punctuator(Punctuator::Exp) + lexer.tokens[41].kind(), + &TokenKind::Punctuator(Punctuator::Exp) ); assert_eq!( - lexer.tokens[42].kind, - TokenKind::Punctuator(Punctuator::AssignLeftSh) + lexer.tokens[42].kind(), + &TokenKind::Punctuator(Punctuator::AssignLeftSh) ); assert_eq!( - lexer.tokens[43].kind, - TokenKind::Punctuator(Punctuator::AssignRightSh) + lexer.tokens[43].kind(), + &TokenKind::Punctuator(Punctuator::AssignRightSh) ); assert_eq!( - lexer.tokens[44].kind, - TokenKind::Punctuator(Punctuator::AssignURightSh) + lexer.tokens[44].kind(), + &TokenKind::Punctuator(Punctuator::AssignURightSh) ); assert_eq!( - 
lexer.tokens[45].kind, - TokenKind::Punctuator(Punctuator::AssignAnd) + lexer.tokens[45].kind(), + &TokenKind::Punctuator(Punctuator::AssignAnd) ); assert_eq!( - lexer.tokens[46].kind, - TokenKind::Punctuator(Punctuator::AssignOr) + lexer.tokens[46].kind(), + &TokenKind::Punctuator(Punctuator::AssignOr) ); assert_eq!( - lexer.tokens[47].kind, - TokenKind::Punctuator(Punctuator::AssignXor) + lexer.tokens[47].kind(), + &TokenKind::Punctuator(Punctuator::AssignXor) ); assert_eq!( - lexer.tokens[48].kind, - TokenKind::Punctuator(Punctuator::Arrow) + lexer.tokens[48].kind(), + &TokenKind::Punctuator(Punctuator::Arrow) ); } @@ -239,43 +252,76 @@ fn check_keywords() { let mut lexer = Lexer::new(s); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::Keyword(Keyword::Await)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Keyword(Keyword::Break)); - assert_eq!(lexer.tokens[2].kind, TokenKind::Keyword(Keyword::Case)); - assert_eq!(lexer.tokens[3].kind, TokenKind::Keyword(Keyword::Catch)); - assert_eq!(lexer.tokens[4].kind, TokenKind::Keyword(Keyword::Class)); - assert_eq!(lexer.tokens[5].kind, TokenKind::Keyword(Keyword::Const)); - assert_eq!(lexer.tokens[6].kind, TokenKind::Keyword(Keyword::Continue)); - assert_eq!(lexer.tokens[7].kind, TokenKind::Keyword(Keyword::Debugger)); - assert_eq!(lexer.tokens[8].kind, TokenKind::Keyword(Keyword::Default)); - assert_eq!(lexer.tokens[9].kind, TokenKind::Keyword(Keyword::Delete)); - assert_eq!(lexer.tokens[10].kind, TokenKind::Keyword(Keyword::Do)); - assert_eq!(lexer.tokens[11].kind, TokenKind::Keyword(Keyword::Else)); - assert_eq!(lexer.tokens[12].kind, TokenKind::Keyword(Keyword::Export)); - assert_eq!(lexer.tokens[13].kind, TokenKind::Keyword(Keyword::Extends)); - assert_eq!(lexer.tokens[14].kind, TokenKind::Keyword(Keyword::Finally)); - assert_eq!(lexer.tokens[15].kind, TokenKind::Keyword(Keyword::For)); - assert_eq!(lexer.tokens[16].kind, TokenKind::Keyword(Keyword::Function)); - 
assert_eq!(lexer.tokens[17].kind, TokenKind::Keyword(Keyword::If)); - assert_eq!(lexer.tokens[18].kind, TokenKind::Keyword(Keyword::Import)); - assert_eq!(lexer.tokens[19].kind, TokenKind::Keyword(Keyword::In)); - assert_eq!( - lexer.tokens[20].kind, - TokenKind::Keyword(Keyword::InstanceOf) - ); - assert_eq!(lexer.tokens[21].kind, TokenKind::Keyword(Keyword::New)); - assert_eq!(lexer.tokens[22].kind, TokenKind::Keyword(Keyword::Return)); - assert_eq!(lexer.tokens[23].kind, TokenKind::Keyword(Keyword::Super)); - assert_eq!(lexer.tokens[24].kind, TokenKind::Keyword(Keyword::Switch)); - assert_eq!(lexer.tokens[25].kind, TokenKind::Keyword(Keyword::This)); - assert_eq!(lexer.tokens[26].kind, TokenKind::Keyword(Keyword::Throw)); - assert_eq!(lexer.tokens[27].kind, TokenKind::Keyword(Keyword::Try)); - assert_eq!(lexer.tokens[28].kind, TokenKind::Keyword(Keyword::TypeOf)); - assert_eq!(lexer.tokens[29].kind, TokenKind::Keyword(Keyword::Var)); - assert_eq!(lexer.tokens[30].kind, TokenKind::Keyword(Keyword::Void)); - assert_eq!(lexer.tokens[31].kind, TokenKind::Keyword(Keyword::While)); - assert_eq!(lexer.tokens[32].kind, TokenKind::Keyword(Keyword::With)); - assert_eq!(lexer.tokens[33].kind, TokenKind::Keyword(Keyword::Yield)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::Keyword(Keyword::Await)); + assert_eq!(lexer.tokens[1].kind(), &TokenKind::Keyword(Keyword::Break)); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::Keyword(Keyword::Case)); + assert_eq!(lexer.tokens[3].kind(), &TokenKind::Keyword(Keyword::Catch)); + assert_eq!(lexer.tokens[4].kind(), &TokenKind::Keyword(Keyword::Class)); + assert_eq!(lexer.tokens[5].kind(), &TokenKind::Keyword(Keyword::Const)); + assert_eq!( + lexer.tokens[6].kind(), + &TokenKind::Keyword(Keyword::Continue) + ); + assert_eq!( + lexer.tokens[7].kind(), + &TokenKind::Keyword(Keyword::Debugger) + ); + assert_eq!( + lexer.tokens[8].kind(), + &TokenKind::Keyword(Keyword::Default) + ); + assert_eq!(lexer.tokens[9].kind(), 
&TokenKind::Keyword(Keyword::Delete)); + assert_eq!(lexer.tokens[10].kind(), &TokenKind::Keyword(Keyword::Do)); + assert_eq!(lexer.tokens[11].kind(), &TokenKind::Keyword(Keyword::Else)); + assert_eq!( + lexer.tokens[12].kind(), + &TokenKind::Keyword(Keyword::Export) + ); + assert_eq!( + lexer.tokens[13].kind(), + &TokenKind::Keyword(Keyword::Extends) + ); + assert_eq!( + lexer.tokens[14].kind(), + &TokenKind::Keyword(Keyword::Finally) + ); + assert_eq!(lexer.tokens[15].kind(), &TokenKind::Keyword(Keyword::For)); + assert_eq!( + lexer.tokens[16].kind(), + &TokenKind::Keyword(Keyword::Function) + ); + assert_eq!(lexer.tokens[17].kind(), &TokenKind::Keyword(Keyword::If)); + assert_eq!( + lexer.tokens[18].kind(), + &TokenKind::Keyword(Keyword::Import) + ); + assert_eq!(lexer.tokens[19].kind(), &TokenKind::Keyword(Keyword::In)); + assert_eq!( + lexer.tokens[20].kind(), + &TokenKind::Keyword(Keyword::InstanceOf) + ); + assert_eq!(lexer.tokens[21].kind(), &TokenKind::Keyword(Keyword::New)); + assert_eq!( + lexer.tokens[22].kind(), + &TokenKind::Keyword(Keyword::Return) + ); + assert_eq!(lexer.tokens[23].kind(), &TokenKind::Keyword(Keyword::Super)); + assert_eq!( + lexer.tokens[24].kind(), + &TokenKind::Keyword(Keyword::Switch) + ); + assert_eq!(lexer.tokens[25].kind(), &TokenKind::Keyword(Keyword::This)); + assert_eq!(lexer.tokens[26].kind(), &TokenKind::Keyword(Keyword::Throw)); + assert_eq!(lexer.tokens[27].kind(), &TokenKind::Keyword(Keyword::Try)); + assert_eq!( + lexer.tokens[28].kind(), + &TokenKind::Keyword(Keyword::TypeOf) + ); + assert_eq!(lexer.tokens[29].kind(), &TokenKind::Keyword(Keyword::Var)); + assert_eq!(lexer.tokens[30].kind(), &TokenKind::Keyword(Keyword::Void)); + assert_eq!(lexer.tokens[31].kind(), &TokenKind::Keyword(Keyword::While)); + assert_eq!(lexer.tokens[32].kind(), &TokenKind::Keyword(Keyword::With)); + assert_eq!(lexer.tokens[33].kind(), &TokenKind::Keyword(Keyword::Yield)); } #[test] @@ -283,13 +329,13 @@ fn 
check_variable_definition_tokens() { let s = "let a = 'hello';"; let mut lexer = Lexer::new(s); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::Keyword(Keyword::Let)); - assert_eq!(lexer.tokens[1].kind, TokenKind::identifier("a")); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::Keyword(Keyword::Let)); + assert_eq!(lexer.tokens[1].kind(), &TokenKind::identifier("a")); assert_eq!( - lexer.tokens[2].kind, - TokenKind::Punctuator(Punctuator::Assign) + lexer.tokens[2].kind(), + &TokenKind::Punctuator(Punctuator::Assign) ); - assert_eq!(lexer.tokens[3].kind, TokenKind::string_literal("hello")); + assert_eq!(lexer.tokens[3].kind(), &TokenKind::string_literal("hello")); } #[test] @@ -299,26 +345,25 @@ fn check_positions() { let mut lexer = Lexer::new(s); lexer.lex().expect("failed to lex"); // The first column is 1 (not zero indexed) - assert_eq!(lexer.tokens[0].pos.column_number, 1); - assert_eq!(lexer.tokens[0].pos.line_number, 1); + assert_eq!(lexer.tokens[0].span(), span((1, 1), (1, 7))); + // Dot Token starts on column 8 - assert_eq!(lexer.tokens[1].pos.column_number, 8); - assert_eq!(lexer.tokens[1].pos.line_number, 1); + assert_eq!(lexer.tokens[1].span(), span((1, 8), (1, 8))); + // Log Token starts on column 9 - assert_eq!(lexer.tokens[2].pos.column_number, 9); - assert_eq!(lexer.tokens[2].pos.line_number, 1); + assert_eq!(lexer.tokens[2].span(), span((1, 9), (1, 11))); + // Open parenthesis token starts on column 12 - assert_eq!(lexer.tokens[3].pos.column_number, 12); - assert_eq!(lexer.tokens[3].pos.line_number, 1); + assert_eq!(lexer.tokens[3].span(), span((1, 12), (1, 12))); + // String token starts on column 13 - assert_eq!(lexer.tokens[4].pos.column_number, 13); - assert_eq!(lexer.tokens[4].pos.line_number, 1); + assert_eq!(lexer.tokens[4].span(), span((1, 13), (1, 25))); + // Close parenthesis token starts on column 26 - assert_eq!(lexer.tokens[5].pos.column_number, 26); - assert_eq!(lexer.tokens[5].pos.line_number, 1); + 
assert_eq!(lexer.tokens[5].span(), span((1, 26), (1, 26))); + // Semi Colon token starts on column 27 - assert_eq!(lexer.tokens[6].pos.column_number, 27); - assert_eq!(lexer.tokens[6].pos.line_number, 1); + assert_eq!(lexer.tokens[6].span(), span((1, 27), (1, 27))); } #[test] @@ -329,8 +374,7 @@ fn two_divisions_in_expression() { lexer.lex().expect("failed to lex"); // dbg!(&lexer.tokens); - assert_eq!(lexer.tokens[11].pos.column_number, 37); - assert_eq!(lexer.tokens[11].pos.line_number, 1); + assert_eq!(lexer.tokens[11].span(), span((1, 37), (1, 37))); } #[test] @@ -340,17 +384,10 @@ fn check_line_numbers() { let mut lexer = Lexer::new(s); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].pos.column_number, 1); - assert_eq!(lexer.tokens[0].pos.line_number, 1); - - assert_eq!(lexer.tokens[1].pos.column_number, 2); - assert_eq!(lexer.tokens[1].pos.line_number, 1); - - assert_eq!(lexer.tokens[2].pos.column_number, 1); - assert_eq!(lexer.tokens[2].pos.line_number, 2); - - assert_eq!(lexer.tokens[3].pos.column_number, 2); - assert_eq!(lexer.tokens[3].pos.line_number, 2); + assert_eq!(lexer.tokens[0].span(), span((1, 1), (1, 1))); + assert_eq!(lexer.tokens[1].span(), span((1, 2), (1, 3))); + assert_eq!(lexer.tokens[2].span(), span((1, 4), (1, 4))); + assert_eq!(lexer.tokens[3].span(), span((1, 4), (1, 6))); } // Increment/Decrement @@ -360,12 +397,15 @@ fn check_decrement_advances_lexer_2_places() { let s = "let a = b--;"; let mut lexer = Lexer::new(s); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[4].kind, TokenKind::Punctuator(Punctuator::Dec)); + assert_eq!( + lexer.tokens[4].kind(), + &TokenKind::Punctuator(Punctuator::Dec) + ); // Decrementing means adding 2 characters '--', the lexer should consume it as a single token // and move the curser forward by 2, meaning the next token should be a semicolon assert_eq!( - lexer.tokens[5].kind, - TokenKind::Punctuator(Punctuator::Semicolon) + lexer.tokens[5].kind(), + 
&TokenKind::Punctuator(Punctuator::Semicolon) ); } @@ -376,24 +416,24 @@ fn numbers() { ); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(1)); - assert_eq!(lexer.tokens[1].kind, TokenKind::numeric_literal(2)); - assert_eq!(lexer.tokens[2].kind, TokenKind::numeric_literal(52)); - assert_eq!(lexer.tokens[3].kind, TokenKind::numeric_literal(46)); - assert_eq!(lexer.tokens[4].kind, TokenKind::numeric_literal(7.89)); - assert_eq!(lexer.tokens[5].kind, TokenKind::numeric_literal(42.0)); - assert_eq!(lexer.tokens[6].kind, TokenKind::numeric_literal(5000.0)); - assert_eq!(lexer.tokens[7].kind, TokenKind::numeric_literal(5000.0)); - assert_eq!(lexer.tokens[8].kind, TokenKind::numeric_literal(0.005)); - assert_eq!(lexer.tokens[9].kind, TokenKind::numeric_literal(2)); - assert_eq!(lexer.tokens[10].kind, TokenKind::numeric_literal(83)); - assert_eq!(lexer.tokens[11].kind, TokenKind::numeric_literal(999)); - assert_eq!(lexer.tokens[12].kind, TokenKind::numeric_literal(10.0)); - assert_eq!(lexer.tokens[13].kind, TokenKind::numeric_literal(0.1)); - assert_eq!(lexer.tokens[14].kind, TokenKind::numeric_literal(10.0)); - assert_eq!(lexer.tokens[15].kind, TokenKind::numeric_literal(10.0)); - assert_eq!(lexer.tokens[16].kind, TokenKind::numeric_literal(0.0)); - assert_eq!(lexer.tokens[17].kind, TokenKind::numeric_literal(0.12)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(1)); + assert_eq!(lexer.tokens[1].kind(), &TokenKind::numeric_literal(2)); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::numeric_literal(52)); + assert_eq!(lexer.tokens[3].kind(), &TokenKind::numeric_literal(46)); + assert_eq!(lexer.tokens[4].kind(), &TokenKind::numeric_literal(7.89)); + assert_eq!(lexer.tokens[5].kind(), &TokenKind::numeric_literal(42.0)); + assert_eq!(lexer.tokens[6].kind(), &TokenKind::numeric_literal(5000.0)); + assert_eq!(lexer.tokens[7].kind(), &TokenKind::numeric_literal(5000.0)); + assert_eq!(lexer.tokens[8].kind(), 
&TokenKind::numeric_literal(0.005)); + assert_eq!(lexer.tokens[9].kind(), &TokenKind::numeric_literal(2)); + assert_eq!(lexer.tokens[10].kind(), &TokenKind::numeric_literal(83)); + assert_eq!(lexer.tokens[11].kind(), &TokenKind::numeric_literal(999)); + assert_eq!(lexer.tokens[12].kind(), &TokenKind::numeric_literal(10.0)); + assert_eq!(lexer.tokens[13].kind(), &TokenKind::numeric_literal(0.1)); + assert_eq!(lexer.tokens[14].kind(), &TokenKind::numeric_literal(10.0)); + assert_eq!(lexer.tokens[15].kind(), &TokenKind::numeric_literal(10.0)); + assert_eq!(lexer.tokens[16].kind(), &TokenKind::numeric_literal(0.0)); + assert_eq!(lexer.tokens[17].kind(), &TokenKind::numeric_literal(0.12)); } #[test] @@ -401,11 +441,14 @@ fn implicit_octal_edge_case() { let mut lexer = Lexer::new("044.5 094.5"); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(36)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Dot)); - assert_eq!(lexer.tokens[2].kind, TokenKind::numeric_literal(5)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(36)); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Dot) + ); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::numeric_literal(5)); - assert_eq!(lexer.tokens[3].kind, TokenKind::numeric_literal(94.5)); + assert_eq!(lexer.tokens[3].kind(), &TokenKind::numeric_literal(94.5)); } #[test] @@ -413,16 +456,19 @@ fn hexadecimal_edge_case() { let mut lexer = Lexer::new("0xffff.ff 0xffffff"); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(0xffff)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Dot)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(0xffff)); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Dot) + ); assert_eq!( - lexer.tokens[2].kind, - TokenKind::Identifier(String::from("ff")) + lexer.tokens[2].kind(), + 
&TokenKind::Identifier(String::from("ff")) ); assert_eq!( - lexer.tokens[3].kind, - TokenKind::numeric_literal(0x00ff_ffff) + lexer.tokens[3].kind(), + &TokenKind::numeric_literal(0x00ff_ffff) ); } @@ -436,8 +482,11 @@ fn single_number_without_semicolon() { fn number_followed_by_dot() { let mut lexer = Lexer::new("1.."); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(1.0)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Dot)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(1.0)); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Dot) + ); } #[test] @@ -445,8 +494,8 @@ fn regex_literal() { let mut lexer = Lexer::new("/(?:)/"); lexer.lex().expect("failed to lex"); assert_eq!( - lexer.tokens[0].kind, - TokenKind::regular_expression_literal("(?:)", "") + lexer.tokens[0].kind(), + &TokenKind::regular_expression_literal("(?:)", "") ); } @@ -455,8 +504,8 @@ fn regex_literal_flags() { let mut lexer = Lexer::new(r"/\/[^\/]*\/*/gmi"); lexer.lex().expect("failed to lex"); assert_eq!( - lexer.tokens[0].kind, - TokenKind::regular_expression_literal("\\/[^\\/]*\\/*", "gmi") + lexer.tokens[0].kind(), + &TokenKind::regular_expression_literal("\\/[^\\/]*\\/*", "gmi") ); } @@ -464,55 +513,73 @@ fn regex_literal_flags() { fn addition_no_spaces() { let mut lexer = Lexer::new("1+1"); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(1)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add)); - assert_eq!(lexer.tokens[2].kind, TokenKind::numeric_literal(1)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(1)); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Add) + ); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::numeric_literal(1)); } #[test] fn addition_no_spaces_left_side() { let mut lexer = Lexer::new("1+ 1"); lexer.lex().expect("failed to lex"); - 
assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(1)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add)); - assert_eq!(lexer.tokens[2].kind, TokenKind::numeric_literal(1)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(1)); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Add) + ); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::numeric_literal(1)); } #[test] fn addition_no_spaces_right_side() { let mut lexer = Lexer::new("1 +1"); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(1)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add)); - assert_eq!(lexer.tokens[2].kind, TokenKind::numeric_literal(1)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(1)); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Add) + ); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::numeric_literal(1)); } #[test] fn addition_no_spaces_e_number_left_side() { let mut lexer = Lexer::new("1e2+ 1"); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(100.0)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add)); - assert_eq!(lexer.tokens[2].kind, TokenKind::numeric_literal(1)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(100.0)); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Add) + ); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::numeric_literal(1)); } #[test] fn addition_no_spaces_e_number_right_side() { let mut lexer = Lexer::new("1 +1e3"); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(1)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add)); - assert_eq!(lexer.tokens[2].kind, TokenKind::numeric_literal(1000.0)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(1)); + assert_eq!( + 
lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Add) + ); + assert_eq!(lexer.tokens[2].kind(), &TokenKind::numeric_literal(1000.0)); } #[test] fn addition_no_spaces_e_number() { let mut lexer = Lexer::new("1e3+1e11"); lexer.lex().expect("failed to lex"); - assert_eq!(lexer.tokens[0].kind, TokenKind::numeric_literal(1000.0)); - assert_eq!(lexer.tokens[1].kind, TokenKind::Punctuator(Punctuator::Add)); + assert_eq!(lexer.tokens[0].kind(), &TokenKind::numeric_literal(1000.0)); + assert_eq!( + lexer.tokens[1].kind(), + &TokenKind::Punctuator(Punctuator::Add) + ); assert_eq!( - lexer.tokens[2].kind, - TokenKind::numeric_literal(100_000_000_000.0) + lexer.tokens[2].kind(), + &TokenKind::numeric_literal(100_000_000_000.0) ); } diff --git a/boa/src/syntax/parser/cursor.rs b/boa/src/syntax/parser/cursor.rs index ffc61f3df0f..744b201e86f 100644 --- a/boa/src/syntax/parser/cursor.rs +++ b/boa/src/syntax/parser/cursor.rs @@ -2,8 +2,8 @@ use super::ParseError; use crate::syntax::ast::{ - punc::Punctuator, token::{Token, TokenKind}, + Punctuator, }; /// Token cursor. 
@@ -45,7 +45,7 @@ impl<'a> Cursor<'a> { if let Some(tk) = token { self.pos += 1; - if tk.kind != TokenKind::LineTerminator { + if tk.kind() != &TokenKind::LineTerminator { break Some(tk); } } else { @@ -63,7 +63,7 @@ impl<'a> Cursor<'a> { count += 1; if let Some(tk) = token { - if tk.kind != TokenKind::LineTerminator { + if tk.kind() != &TokenKind::LineTerminator { if skipped == skip { break Some(tk); } @@ -88,8 +88,8 @@ impl<'a> Cursor<'a> { .tokens .get(self.pos - 1) .expect("token disappeared") - .kind - == TokenKind::LineTerminator + .kind() + == &TokenKind::LineTerminator && self.pos > 0 { self.pos -= 1; @@ -103,7 +103,7 @@ impl<'a> Cursor<'a> { } else { let mut back = 1; let mut tok = self.tokens.get(self.pos - back).expect("token disappeared"); - while self.pos >= back && tok.kind == TokenKind::LineTerminator { + while self.pos >= back && tok.kind() == &TokenKind::LineTerminator { back += 1; tok = self.tokens.get(self.pos - back).expect("token disappeared"); } @@ -126,7 +126,7 @@ impl<'a> Cursor<'a> { let next_token = self.next().ok_or(ParseError::AbruptEnd)?; let kind = kind.into(); - if next_token.kind == kind { + if next_token.kind() == &kind { Ok(()) } else { Err(ParseError::Expected( @@ -144,7 +144,7 @@ impl<'a> Cursor<'a> { /// [spec]: https://tc39.es/ecma262/#sec-automatic-semicolon-insertion pub(super) fn peek_semicolon(&self, do_while: bool) -> (bool, Option<&Token>) { match self.tokens.get(self.pos) { - Some(tk) => match tk.kind { + Some(tk) => match *tk.kind() { TokenKind::Punctuator(Punctuator::Semicolon) => (true, Some(tk)), TokenKind::LineTerminator | TokenKind::Punctuator(Punctuator::CloseBlock) => { (true, Some(tk)) @@ -160,7 +160,7 @@ impl<'a> Cursor<'a> { .tokens .get(self.pos - 1) .expect("could not find previous token"); - if tok.kind == TokenKind::Punctuator(Punctuator::CloseParen) { + if tok.kind() == &TokenKind::Punctuator(Punctuator::CloseParen) { return (true, Some(tk)); } } @@ -183,7 +183,7 @@ impl<'a> Cursor<'a> { routine: &'static 
str, ) -> Result<(), ParseError> { match self.peek_semicolon(do_while) { - (true, Some(tk)) => match tk.kind { + (true, Some(tk)) => match *tk.kind() { TokenKind::Punctuator(Punctuator::Semicolon) | TokenKind::LineTerminator => { self.pos += 1; Ok(()) @@ -214,11 +214,11 @@ impl<'a> Cursor<'a> { let token = self.tokens.get(self.pos + count); count += 1; if let Some(tk) = token { - if skipped == skip && tk.kind == TokenKind::LineTerminator { + if skipped == skip && tk.kind() == &TokenKind::LineTerminator { break Err(ParseError::Unexpected(tk.clone(), Some(routine))); - } else if skipped == skip && tk.kind != TokenKind::LineTerminator { + } else if skipped == skip && tk.kind() != &TokenKind::LineTerminator { break Ok(()); - } else if tk.kind != TokenKind::LineTerminator { + } else if tk.kind() != &TokenKind::LineTerminator { skipped += 1; } } else { @@ -237,7 +237,7 @@ impl<'a> Cursor<'a> { { let next_token = self.peek(0)?; - if next_token.kind == kind.into() { + if next_token.kind() == &kind.into() { self.next() } else { None diff --git a/boa/src/syntax/parser/error.rs b/boa/src/syntax/parser/error.rs index 1d83c95357a..14adec452fd 100644 --- a/boa/src/syntax/parser/error.rs +++ b/boa/src/syntax/parser/error.rs @@ -1,9 +1,8 @@ //! Error and result implementation for the parser. 
use crate::syntax::ast::{ - keyword::Keyword, - node::Node, - pos::Position, + position::Position, token::{Token, TokenKind}, + Keyword, Node, }; use std::fmt; @@ -64,18 +63,23 @@ impl fmt::Display for ParseError { }, actual, routine, - actual.pos.line_number, - actual.pos.column_number + actual.span().start().line_number(), + actual.span().start().column_number() ), Self::ExpectedExpr(expected, actual, pos) => write!( f, "Expected expression '{}', got '{}' at line {}, col {}", - expected, actual, pos.line_number, pos.column_number + expected, + actual, + pos.line_number(), + pos.column_number() ), Self::UnexpectedKeyword(keyword, pos) => write!( f, "Unexpected keyword: '{}' at line {}, col {}", - keyword, pos.line_number, pos.column_number + keyword, + pos.line_number(), + pos.column_number() ), Self::Unexpected(tok, msg) => write!( f, @@ -86,8 +90,8 @@ impl fmt::Display for ParseError { } else { String::new() }, - tok.pos.line_number, - tok.pos.column_number + tok.span().start().line_number(), + tok.span().start().column_number() ), Self::AbruptEnd => write!(f, "Abrupt End"), Self::General(msg, pos) => write!( @@ -95,7 +99,11 @@ impl fmt::Display for ParseError { "{}{}", msg, if let Some(pos) = pos { - format!(" at line {}, col {}", pos.line_number, pos.column_number) + format!( + " at line {}, col {}", + pos.line_number(), + pos.column_number() + ) } else { String::new() } diff --git a/boa/src/syntax/parser/expression/assignment/arrow_function.rs b/boa/src/syntax/parser/expression/assignment/arrow_function.rs index 2d44f6cedb2..fff5d52a3c4 100644 --- a/boa/src/syntax/parser/expression/assignment/arrow_function.rs +++ b/boa/src/syntax/parser/expression/assignment/arrow_function.rs @@ -10,9 +10,8 @@ use super::AssignmentExpression; use crate::syntax::{ ast::{ - node::{ArrowFunctionDecl, FormalParameter, Node}, - punc::Punctuator, - token::TokenKind, + node::{ArrowFunctionDecl, Block, FormalParameter, Node}, + Punctuator, TokenKind, }, parser::{ 
function::{FormalParameters, FunctionBody}, @@ -61,12 +60,12 @@ impl TokenParser for ArrowFunction { fn parse(self, cursor: &mut Cursor<'_>) -> Result { let next_token = cursor.peek(0).ok_or(ParseError::AbruptEnd)?; - let params = if let TokenKind::Punctuator(Punctuator::OpenParen) = &next_token.kind { + let params = if let TokenKind::Punctuator(Punctuator::OpenParen) = &next_token.kind() { // CoverParenthesizedExpressionAndArrowParameterList cursor.expect(Punctuator::OpenParen, "arrow function")?; let params = FormalParameters::new(self.allow_yield, self.allow_await).parse(cursor)?; cursor.expect(Punctuator::CloseParen, "arrow function")?; - params.into_boxed_slice() + params } else { let param = BindingIdentifier::new(self.allow_yield, self.allow_await) .parse(cursor) @@ -86,7 +85,7 @@ impl TokenParser for ArrowFunction { let body = ConciseBody::new(self.allow_in).parse(cursor)?; - Ok(ArrowFunctionDecl::new(params, body)) + Ok(ArrowFunctionDecl::new(params, vec![body])) } } @@ -112,12 +111,10 @@ impl TokenParser for ConciseBody { type Output = Node; fn parse(self, cursor: &mut Cursor<'_>) -> Result { - match cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind { + match cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind() { TokenKind::Punctuator(Punctuator::OpenBlock) => { let _ = cursor.next(); - let body = FunctionBody::new(false, false) - .parse(cursor) - .map(Node::statement_list)?; + let body = Block::from(FunctionBody::new(false, false).parse(cursor)?).into(); cursor.expect(Punctuator::CloseBlock, "arrow function")?; Ok(body) } diff --git a/boa/src/syntax/parser/expression/assignment/conditional.rs b/boa/src/syntax/parser/expression/assignment/conditional.rs index 874aa950c9d..b55d063c273 100644 --- a/boa/src/syntax/parser/expression/assignment/conditional.rs +++ b/boa/src/syntax/parser/expression/assignment/conditional.rs @@ -8,7 +8,7 @@ //! 
[spec]: https://tc39.es/ecma262/#sec-conditional-operator use crate::syntax::{ - ast::{node::Node, punc::Punctuator, token::TokenKind}, + ast::{Node, Punctuator, TokenKind}, parser::{ expression::{AssignmentExpression, LogicalORExpression}, AllowAwait, AllowIn, AllowYield, Cursor, ParseResult, TokenParser, @@ -59,7 +59,7 @@ impl TokenParser for ConditionalExpression { .parse(cursor)?; if let Some(tok) = cursor.next() { - if tok.kind == TokenKind::Punctuator(Punctuator::Question) { + if tok.kind() == &TokenKind::Punctuator(Punctuator::Question) { let then_clause = AssignmentExpression::new(self.allow_in, self.allow_yield, self.allow_await) .parse(cursor)?; diff --git a/boa/src/syntax/parser/expression/assignment/exponentiation.rs b/boa/src/syntax/parser/expression/assignment/exponentiation.rs index 8acc765ebbd..78a5f9a4b8f 100644 --- a/boa/src/syntax/parser/expression/assignment/exponentiation.rs +++ b/boa/src/syntax/parser/expression/assignment/exponentiation.rs @@ -9,11 +9,9 @@ use crate::syntax::{ ast::{ - keyword::Keyword, node::{BinOp, Node}, op::NumOp, - punc::Punctuator, - token::TokenKind, + Keyword, Punctuator, TokenKind, }, parser::{ expression::{unary::UnaryExpression, update::UpdateExpression}, @@ -53,7 +51,7 @@ impl ExponentiationExpression { /// Checks by looking at the next token to see whether it's a unary operator or not. 
fn is_unary_expression(cursor: &mut Cursor<'_>) -> bool { if let Some(tok) = cursor.peek(0) { - match tok.kind { + match tok.kind() { TokenKind::Keyword(Keyword::Delete) | TokenKind::Keyword(Keyword::Void) | TokenKind::Keyword(Keyword::TypeOf) @@ -79,7 +77,7 @@ impl TokenParser for ExponentiationExpression { let lhs = UpdateExpression::new(self.allow_yield, self.allow_await).parse(cursor)?; if let Some(tok) = cursor.next() { - if let TokenKind::Punctuator(Punctuator::Exp) = tok.kind { + if let TokenKind::Punctuator(Punctuator::Exp) = tok.kind() { return Ok(Node::from(BinOp::new(NumOp::Exp, lhs, self.parse(cursor)?))); } else { cursor.back(); diff --git a/boa/src/syntax/parser/expression/assignment/mod.rs b/boa/src/syntax/parser/expression/assignment/mod.rs index e11e1b82b89..a3d652324f3 100644 --- a/boa/src/syntax/parser/expression/assignment/mod.rs +++ b/boa/src/syntax/parser/expression/assignment/mod.rs @@ -14,10 +14,8 @@ mod exponentiation; use self::{arrow_function::ArrowFunction, conditional::ConditionalExpression}; use crate::syntax::{ ast::{ - keyword::Keyword, node::{Assign, BinOp, Node}, - punc::Punctuator, - token::TokenKind, + Keyword, Punctuator, TokenKind, }, parser::{AllowAwait, AllowIn, AllowYield, Cursor, ParseError, ParseResult, TokenParser}, }; @@ -74,7 +72,7 @@ impl TokenParser for AssignmentExpression { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { // Arrow function let next_token = cursor.peek(0).ok_or(ParseError::AbruptEnd)?; - match next_token.kind { + match next_token.kind() { // a=>{} TokenKind::Identifier(_) | TokenKind::Keyword(Keyword::Yield) @@ -84,7 +82,7 @@ impl TokenParser for AssignmentExpression { .is_ok() => { if let Some(tok) = cursor.peek(1) { - if tok.kind == TokenKind::Punctuator(Punctuator::Arrow) { + if tok.kind() == &TokenKind::Punctuator(Punctuator::Arrow) { return ArrowFunction::new( self.allow_in, self.allow_yield, @@ -112,7 +110,7 @@ impl TokenParser for AssignmentExpression { .parse(cursor)?; if let 
Some(tok) = cursor.next() { - match tok.kind { + match tok.kind() { TokenKind::Punctuator(Punctuator::Assign) => { lhs = Node::from(Assign::new(lhs, self.parse(cursor)?)); } diff --git a/boa/src/syntax/parser/expression/left_hand_side/arguments.rs b/boa/src/syntax/parser/expression/left_hand_side/arguments.rs index 330b804c7dc..1d055e6fad6 100644 --- a/boa/src/syntax/parser/expression/left_hand_side/arguments.rs +++ b/boa/src/syntax/parser/expression/left_hand_side/arguments.rs @@ -8,7 +8,7 @@ //! [spec]: https://tc39.es/ecma262/#prod-Arguments use crate::syntax::{ - ast::{node::Node, punc::Punctuator, token::TokenKind}, + ast::{Node, Punctuator, TokenKind}, parser::{ expression::AssignmentExpression, AllowAwait, AllowYield, Cursor, ParseError, TokenParser, }, @@ -50,7 +50,7 @@ impl TokenParser for Arguments { let mut args = Vec::new(); loop { let next_token = cursor.next().ok_or(ParseError::AbruptEnd)?; - match next_token.kind { + match next_token.kind() { TokenKind::Punctuator(Punctuator::CloseParen) => break, TokenKind::Punctuator(Punctuator::Comma) => { if args.is_empty() { diff --git a/boa/src/syntax/parser/expression/left_hand_side/call.rs b/boa/src/syntax/parser/expression/left_hand_side/call.rs index 274512c3827..67336829201 100644 --- a/boa/src/syntax/parser/expression/left_hand_side/call.rs +++ b/boa/src/syntax/parser/expression/left_hand_side/call.rs @@ -9,7 +9,7 @@ use super::arguments::Arguments; use crate::syntax::{ - ast::{node::Node, punc::Punctuator, token::TokenKind}, + ast::{Node, Punctuator, TokenKind}, parser::{ expression::Expression, AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser, @@ -49,7 +49,7 @@ impl TokenParser for CallExpression { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { let mut lhs = match cursor.peek(0) { - Some(tk) if tk.kind == TokenKind::Punctuator(Punctuator::OpenParen) => { + Some(tk) if tk.kind() == &TokenKind::Punctuator(Punctuator::OpenParen) => { let args = Arguments::new(self.allow_yield, 
self.allow_await).parse(cursor)?; Node::call(self.first_member_expr, args) } @@ -64,14 +64,14 @@ impl TokenParser for CallExpression { }; while let Some(tok) = cursor.peek(0) { - match tok.kind { + match tok.kind() { TokenKind::Punctuator(Punctuator::OpenParen) => { let args = Arguments::new(self.allow_yield, self.allow_await).parse(cursor)?; lhs = Node::call(lhs, args); } TokenKind::Punctuator(Punctuator::Dot) => { let _ = cursor.next().ok_or(ParseError::AbruptEnd)?; // We move the cursor. - match &cursor.next().ok_or(ParseError::AbruptEnd)?.kind { + match &cursor.next().ok_or(ParseError::AbruptEnd)?.kind() { TokenKind::Identifier(name) => { lhs = Node::get_const_field(lhs, name.clone().into_boxed_str()); } diff --git a/boa/src/syntax/parser/expression/left_hand_side/member.rs b/boa/src/syntax/parser/expression/left_hand_side/member.rs index 5c3362ffb83..7f9b0abb2e7 100644 --- a/boa/src/syntax/parser/expression/left_hand_side/member.rs +++ b/boa/src/syntax/parser/expression/left_hand_side/member.rs @@ -7,7 +7,7 @@ use super::arguments::Arguments; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{Keyword, Node, Punctuator, TokenKind}, parser::{ expression::{primary::PrimaryExpression, Expression}, AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser, @@ -44,8 +44,8 @@ impl TokenParser for MemberExpression { type Output = Node; fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { - let mut lhs = if cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind - == TokenKind::Keyword(Keyword::New) + let mut lhs = if cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind() + == &TokenKind::Keyword(Keyword::New) { let _ = cursor.next().expect("keyword disappeared"); let lhs = self.parse(cursor)?; @@ -57,10 +57,10 @@ impl TokenParser for MemberExpression { PrimaryExpression::new(self.allow_yield, self.allow_await).parse(cursor)? 
}; while let Some(tok) = cursor.peek(0) { - match &tok.kind { + match &tok.kind() { TokenKind::Punctuator(Punctuator::Dot) => { let _ = cursor.next().ok_or(ParseError::AbruptEnd)?; // We move the cursor forward. - match &cursor.next().ok_or(ParseError::AbruptEnd)?.kind { + match &cursor.next().ok_or(ParseError::AbruptEnd)?.kind() { TokenKind::Identifier(name) => { lhs = Node::get_const_field(lhs, name.clone().into_boxed_str()) } diff --git a/boa/src/syntax/parser/expression/left_hand_side/mod.rs b/boa/src/syntax/parser/expression/left_hand_side/mod.rs index 470184276a1..461ed9b6c13 100644 --- a/boa/src/syntax/parser/expression/left_hand_side/mod.rs +++ b/boa/src/syntax/parser/expression/left_hand_side/mod.rs @@ -13,7 +13,7 @@ mod member; use self::{call::CallExpression, member::MemberExpression}; use crate::syntax::{ - ast::{node::Node, punc::Punctuator, token::TokenKind}, + ast::{Node, Punctuator, TokenKind}, parser::{AllowAwait, AllowYield, Cursor, ParseResult, TokenParser}, }; @@ -52,7 +52,7 @@ impl TokenParser for LeftHandSideExpression { // TODO: Implement NewExpression: new MemberExpression let lhs = MemberExpression::new(self.allow_yield, self.allow_await).parse(cursor)?; match cursor.peek(0) { - Some(ref tok) if tok.kind == TokenKind::Punctuator(Punctuator::OpenParen) => { + Some(ref tok) if tok.kind() == &TokenKind::Punctuator(Punctuator::OpenParen) => { CallExpression::new(self.allow_yield, self.allow_await, lhs).parse(cursor) } _ => Ok(lhs), // TODO: is this correct? 
diff --git a/boa/src/syntax/parser/expression/mod.rs b/boa/src/syntax/parser/expression/mod.rs index 6d33ea4a07c..f3a4d777536 100644 --- a/boa/src/syntax/parser/expression/mod.rs +++ b/boa/src/syntax/parser/expression/mod.rs @@ -19,10 +19,8 @@ use self::assignment::ExponentiationExpression; pub(super) use self::{assignment::AssignmentExpression, primary::Initializer}; use super::{AllowAwait, AllowIn, AllowYield, Cursor, ParseResult, TokenParser}; use crate::syntax::ast::{ - keyword::Keyword, node::{BinOp, Node}, - punc::Punctuator, - token::TokenKind, + Keyword, Punctuator, TokenKind, }; // For use in the expression! macro to allow for both Punctuator and Keyword parameters. @@ -55,8 +53,8 @@ macro_rules! expression { ($name:ident, $lower:ident, [$( $op:path ),*], [$( $lo fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { let mut lhs = $lower::new($( self.$low_param ),*).parse(cursor)?; while let Some(tok) = cursor.peek(0) { - match tok.kind { - TokenKind::Punctuator(op) if $( op == $op )||* => { + match tok.kind() { + TokenKind::Punctuator(op) if $( *op == $op )||* => { let _ = cursor.next().expect("token disappeared"); lhs = Node::from(BinOp::new( op.as_binop().expect("Could not get binary operation."), @@ -64,7 +62,7 @@ macro_rules! expression { ($name:ident, $lower:ident, [$( $op:path ),*], [$( $lo $lower::new($( self.$low_param ),*).parse(cursor)? 
)); } - TokenKind::Keyword(op) if $( op == $op )||* => { + TokenKind::Keyword(op) if $( *op == $op )||* => { let _ = cursor.next().expect("token disappeared"); lhs = Node::from(BinOp::new( op.as_binop().expect("Could not get binary operation."), diff --git a/boa/src/syntax/parser/expression/primary/array_initializer/mod.rs b/boa/src/syntax/parser/expression/primary/array_initializer/mod.rs index e34ba511ac0..7a92f23ffee 100644 --- a/boa/src/syntax/parser/expression/primary/array_initializer/mod.rs +++ b/boa/src/syntax/parser/expression/primary/array_initializer/mod.rs @@ -14,7 +14,7 @@ use crate::syntax::{ ast::{ constant::Const, node::{ArrayDecl, Node}, - punc::Punctuator, + Punctuator, }, parser::{ expression::AssignmentExpression, AllowAwait, AllowYield, Cursor, ParseError, TokenParser, diff --git a/boa/src/syntax/parser/expression/primary/function_expression.rs b/boa/src/syntax/parser/expression/primary/function_expression.rs index dc2a6307c6d..f27bcab26b8 100644 --- a/boa/src/syntax/parser/expression/primary/function_expression.rs +++ b/boa/src/syntax/parser/expression/primary/function_expression.rs @@ -8,11 +8,11 @@ //! 
[spec]: https://tc39.es/ecma262/#prod-FunctionExpression use crate::syntax::{ - ast::{node::Node, punc::Punctuator}, + ast::{node::FunctionExpr, Punctuator}, parser::{ function::{FormalParameters, FunctionBody}, statement::BindingIdentifier, - Cursor, ParseResult, TokenParser, + Cursor, ParseError, TokenParser, }, }; @@ -28,9 +28,9 @@ use crate::syntax::{ pub(super) struct FunctionExpression; impl TokenParser for FunctionExpression { - type Output = Node; + type Output = FunctionExpr; - fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { + fn parse(self, cursor: &mut Cursor<'_>) -> Result { let name = BindingIdentifier::new(false, false).try_parse(cursor); cursor.expect(Punctuator::OpenParen, "function expression")?; @@ -40,12 +40,10 @@ impl TokenParser for FunctionExpression { cursor.expect(Punctuator::CloseParen, "function expression")?; cursor.expect(Punctuator::OpenBlock, "function expression")?; - let body = FunctionBody::new(false, false) - .parse(cursor) - .map(Node::statement_list)?; + let body = FunctionBody::new(false, false).parse(cursor)?; cursor.expect(Punctuator::CloseBlock, "function expression")?; - Ok(Node::function_expr::<_, Box<_>, _, _>(name, params, body)) + Ok(FunctionExpr::new(name, params, body)) } } diff --git a/boa/src/syntax/parser/expression/primary/mod.rs b/boa/src/syntax/parser/expression/primary/mod.rs index 627d5e2ac48..ee77eea0f12 100644 --- a/boa/src/syntax/parser/expression/primary/mod.rs +++ b/boa/src/syntax/parser/expression/primary/mod.rs @@ -21,11 +21,9 @@ use super::Expression; use crate::syntax::{ ast::{ constant::Const, - keyword::Keyword, node::{Local, Node}, - punc::Punctuator, token::NumericLiteral, - token::TokenKind, + Keyword, Punctuator, TokenKind, }, parser::{AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser}, }; @@ -65,10 +63,12 @@ impl TokenParser for PrimaryExpression { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { let tok = cursor.next().ok_or(ParseError::AbruptEnd)?; - match 
&tok.kind { + match &tok.kind() { TokenKind::Keyword(Keyword::This) => Ok(Node::This), // TokenKind::Keyword(Keyword::Arguments) => Ok(Node::new(NodeBase::Arguments, tok.pos)), - TokenKind::Keyword(Keyword::Function) => FunctionExpression.parse(cursor), + TokenKind::Keyword(Keyword::Function) => { + FunctionExpression.parse(cursor).map(Node::from) + } TokenKind::Punctuator(Punctuator::OpenParen) => { let expr = Expression::new(true, self.allow_yield, self.allow_await).parse(cursor)?; diff --git a/boa/src/syntax/parser/expression/primary/object_initializer/mod.rs b/boa/src/syntax/parser/expression/primary/object_initializer/mod.rs index a4e22f630d1..b562eb4a32b 100644 --- a/boa/src/syntax/parser/expression/primary/object_initializer/mod.rs +++ b/boa/src/syntax/parser/expression/primary/object_initializer/mod.rs @@ -12,9 +12,9 @@ mod tests; use crate::syntax::{ ast::{ - node::{self, MethodDefinitionKind, Node}, - punc::Punctuator, + node::{self, FunctionExpr, MethodDefinitionKind, Node}, token::{Token, TokenKind}, + Punctuator, }, parser::{ expression::AssignmentExpression, @@ -143,7 +143,7 @@ impl TokenParser for PropertyDefinition { let pos = cursor .peek(0) - .map(|tok| tok.pos) + .map(|tok| tok.span().start()) .ok_or(ParseError::AbruptEnd)?; Err(ParseError::General( "expected property definition", @@ -231,9 +231,7 @@ impl TokenParser for MethodDefinition { TokenKind::Punctuator(Punctuator::OpenBlock), "property method definition", )?; - let body = FunctionBody::new(false, false) - .parse(cursor) - .map(Node::statement_list)?; + let body = FunctionBody::new(false, false).parse(cursor)?; cursor.expect( TokenKind::Punctuator(Punctuator::CloseBlock), "property method definition", @@ -242,7 +240,7 @@ impl TokenParser for MethodDefinition { Ok(node::PropertyDefinition::method_definition( methodkind, prop_name, - Node::function_expr::<_, String, _, _>(None, params, body), + FunctionExpr::new(None, params, body), )) } } diff --git 
a/boa/src/syntax/parser/expression/primary/object_initializer/tests.rs b/boa/src/syntax/parser/expression/primary/object_initializer/tests.rs index acf8a6ef3cf..19149198538 100644 --- a/boa/src/syntax/parser/expression/primary/object_initializer/tests.rs +++ b/boa/src/syntax/parser/expression/primary/object_initializer/tests.rs @@ -1,5 +1,5 @@ use crate::syntax::{ - ast::node::{FormalParameter, MethodDefinitionKind, Node, PropertyDefinition}, + ast::node::{FormalParameter, FunctionExpr, MethodDefinitionKind, Node, PropertyDefinition}, parser::tests::check_parser, }; @@ -32,7 +32,7 @@ fn check_object_short_function() { PropertyDefinition::method_definition( MethodDefinitionKind::Ordinary, "b", - Node::function_expr::<_, String, _, _>(None, Vec::new(), Node::statement_list(vec![])), + FunctionExpr::new(None, Vec::new(), Vec::new()), ), ]; @@ -57,10 +57,10 @@ fn check_object_short_function_arguments() { PropertyDefinition::method_definition( MethodDefinitionKind::Ordinary, "b", - Node::FunctionExpr( + FunctionExpr::new( None, - Box::new([FormalParameter::new("test", None, false)]), - Box::new(Node::StatementList(Box::new([]))), + vec![FormalParameter::new("test", None, false)], + Vec::new(), ), ), ]; @@ -85,11 +85,7 @@ fn check_object_getter() { PropertyDefinition::method_definition( MethodDefinitionKind::Get, "b", - Node::FunctionExpr( - None, - Box::new([]), - Box::new(Node::statement_list(Vec::new())), - ), + FunctionExpr::new(None, Vec::new(), Vec::new()), ), ]; @@ -113,10 +109,10 @@ fn check_object_setter() { PropertyDefinition::method_definition( MethodDefinitionKind::Set, "b", - Node::function_expr::<_, String, _, _>( + FunctionExpr::new( None, vec![FormalParameter::new("test", None, false)], - Node::statement_list(Vec::new()), + Vec::new(), ), ), ]; diff --git a/boa/src/syntax/parser/expression/primary/tests.rs b/boa/src/syntax/parser/expression/primary/tests.rs index 19f76af5bea..cb05481cfa8 100644 --- a/boa/src/syntax/parser/expression/primary/tests.rs +++ 
b/boa/src/syntax/parser/expression/primary/tests.rs @@ -1,4 +1,4 @@ -use crate::syntax::{ast::node::Node, parser::tests::check_parser}; +use crate::syntax::{ast::Node, parser::tests::check_parser}; #[test] fn check_string() { diff --git a/boa/src/syntax/parser/expression/unary.rs b/boa/src/syntax/parser/expression/unary.rs index 14a2c7a29be..ffd24c2fa7e 100644 --- a/boa/src/syntax/parser/expression/unary.rs +++ b/boa/src/syntax/parser/expression/unary.rs @@ -8,7 +8,7 @@ //! [spec]: https://tc39.es/ecma262/#sec-unary-operators use crate::syntax::{ - ast::{keyword::Keyword, node::Node, op::UnaryOp, punc::Punctuator, token::TokenKind}, + ast::{op::UnaryOp, Keyword, Node, Punctuator, TokenKind}, parser::{ expression::update::UpdateExpression, AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser, @@ -48,7 +48,7 @@ impl TokenParser for UnaryExpression { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { let tok = cursor.next().ok_or(ParseError::AbruptEnd)?; - match tok.kind { + match tok.kind() { TokenKind::Keyword(Keyword::Delete) => { Ok(Node::unary_op(UnaryOp::Delete, self.parse(cursor)?)) } diff --git a/boa/src/syntax/parser/expression/update.rs b/boa/src/syntax/parser/expression/update.rs index 5ba3d839e94..1ed38e62e17 100644 --- a/boa/src/syntax/parser/expression/update.rs +++ b/boa/src/syntax/parser/expression/update.rs @@ -7,7 +7,7 @@ use super::left_hand_side::LeftHandSideExpression; use crate::syntax::{ - ast::{node::Node, op::UnaryOp, punc::Punctuator, token::TokenKind}, + ast::{op::UnaryOp, Node, Punctuator, TokenKind}, parser::{AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser}, }; @@ -42,7 +42,7 @@ impl TokenParser for UpdateExpression { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { let tok = cursor.peek(0).ok_or(ParseError::AbruptEnd)?; - match tok.kind { + match tok.kind() { TokenKind::Punctuator(Punctuator::Inc) => { cursor.next().expect("token disappeared"); return Ok(Node::unary_op( @@ -64,7 +64,7 @@ 
impl TokenParser for UpdateExpression { let lhs = LeftHandSideExpression::new(self.allow_yield, self.allow_await).parse(cursor)?; if let Some(tok) = cursor.peek(0) { - match tok.kind { + match tok.kind() { TokenKind::Punctuator(Punctuator::Inc) => { cursor.next().expect("token disappeared"); return Ok(Node::unary_op(UnaryOp::IncrementPost, lhs)); diff --git a/boa/src/syntax/parser/function/mod.rs b/boa/src/syntax/parser/function/mod.rs index 3402b213e19..d8058ddee23 100644 --- a/boa/src/syntax/parser/function/mod.rs +++ b/boa/src/syntax/parser/function/mod.rs @@ -12,9 +12,8 @@ mod tests; use crate::syntax::{ ast::{ - node::{self, Node}, - punc::Punctuator, - token::TokenKind, + node::{self}, + Punctuator, TokenKind, }, parser::{ expression::Initializer, @@ -52,15 +51,15 @@ impl FormalParameters { } impl TokenParser for FormalParameters { - type Output = Vec; + type Output = Box<[node::FormalParameter]>; fn parse(self, cursor: &mut Cursor<'_>) -> Result { let mut params = Vec::new(); - if cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind - == TokenKind::Punctuator(Punctuator::CloseParen) + if cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind() + == &TokenKind::Punctuator(Punctuator::CloseParen) { - return Ok(params); + return Ok(params.into_boxed_slice()); } loop { @@ -73,8 +72,8 @@ impl TokenParser for FormalParameters { FormalParameter::new(self.allow_yield, self.allow_await).parse(cursor)? 
}); - if cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind - == TokenKind::Punctuator(Punctuator::CloseParen) + if cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind() + == &TokenKind::Punctuator(Punctuator::CloseParen) { break; } @@ -92,7 +91,7 @@ impl TokenParser for FormalParameters { cursor.expect(Punctuator::Comma, "parameter list")?; } - Ok(params) + Ok(params.into_boxed_slice()) } } @@ -225,12 +224,12 @@ impl FunctionStatementList { } impl TokenParser for FunctionStatementList { - type Output = Vec; + type Output = node::StatementList; fn parse(self, cursor: &mut Cursor<'_>) -> Result { if let Some(tk) = cursor.peek(0) { - if tk.kind == Punctuator::CloseBlock.into() { - return Ok(Vec::new()); + if tk.kind() == &Punctuator::CloseBlock.into() { + return Ok(Vec::new().into()); } } diff --git a/boa/src/syntax/parser/function/tests.rs b/boa/src/syntax/parser/function/tests.rs index 5a9dfb22187..06deb570fe1 100644 --- a/boa/src/syntax/parser/function/tests.rs +++ b/boa/src/syntax/parser/function/tests.rs @@ -1,5 +1,5 @@ use crate::syntax::{ - ast::node::{ArrowFunctionDecl, BinOp, FormalParameter, Local, Node}, + ast::node::{ArrowFunctionDecl, BinOp, FormalParameter, FunctionDecl, Local, Node}, ast::op::NumOp, parser::tests::check_parser, }; @@ -9,11 +9,12 @@ use crate::syntax::{ fn check_basic() { check_parser( "function foo(a) { return a; }", - vec![Node::function_decl( - "foo", + vec![FunctionDecl::new( + Box::from("foo"), vec![FormalParameter::new("a", None, false)], - Node::statement_list(vec![Node::return_node(Node::from(Local::from("a")))]), - )], + vec![Node::return_node(Node::from(Local::from("a")))], + ) + .into()], ); } @@ -22,11 +23,12 @@ fn check_basic() { fn check_basic_semicolon_insertion() { check_parser( "function foo(a) { return a }", - vec![Node::function_decl( - "foo", + vec![FunctionDecl::new( + Box::from("foo"), vec![FormalParameter::new("a", None, false)], - Node::statement_list(vec![Node::return_node(Node::from(Local::from("a")))]), - )], + 
vec![Node::return_node(Node::from(Local::from("a")))], + ) + .into()], ); } @@ -35,11 +37,12 @@ fn check_basic_semicolon_insertion() { fn check_empty_return() { check_parser( "function foo(a) { return; }", - vec![Node::function_decl( - "foo", + vec![FunctionDecl::new( + Box::from("foo"), vec![FormalParameter::new("a", None, false)], - Node::statement_list(vec![Node::Return(None)]), - )], + vec![Node::Return(None)], + ) + .into()], ); } @@ -48,11 +51,12 @@ fn check_empty_return() { fn check_empty_return_semicolon_insertion() { check_parser( "function foo(a) { return }", - vec![Node::function_decl( - "foo", + vec![FunctionDecl::new( + Box::from("foo"), vec![FormalParameter::new("a", None, false)], - Node::statement_list(vec![Node::Return(None)]), - )], + vec![Node::Return(None)], + ) + .into()], ); } @@ -61,14 +65,15 @@ fn check_empty_return_semicolon_insertion() { fn check_rest_operator() { check_parser( "function foo(a, ...b) {}", - vec![Node::function_decl( - "foo", + vec![FunctionDecl::new( + Box::from("foo"), vec![ FormalParameter::new("a", None, false), FormalParameter::new("b", None, true), ], - Node::StatementList(Box::new([])), - )], + vec![], + ) + .into()], ); } @@ -77,11 +82,7 @@ fn check_rest_operator() { fn check_arrow_only_rest() { check_parser( "(...a) => {}", - vec![ArrowFunctionDecl::new( - vec![FormalParameter::new("a", None, true)], - Node::StatementList(Box::new([])), - ) - .into()], + vec![ArrowFunctionDecl::new(vec![FormalParameter::new("a", None, true)], vec![]).into()], ); } @@ -96,7 +97,7 @@ fn check_arrow_rest() { FormalParameter::new("b", None, false), FormalParameter::new("c", None, true), ], - Node::StatementList(Box::new([])), + vec![], ) .into()], ); @@ -112,11 +113,11 @@ fn check_arrow() { FormalParameter::new("a", None, false), FormalParameter::new("b", None, false), ], - Node::statement_list(vec![Node::return_node(Node::from(BinOp::new( + vec![Node::return_node(Node::from(BinOp::new( NumOp::Add, Local::from("a"), Local::from("b"), - 
)))]), + )))], ) .into()], ); @@ -132,11 +133,11 @@ fn check_arrow_semicolon_insertion() { FormalParameter::new("a", None, false), FormalParameter::new("b", None, false), ], - Node::statement_list(vec![Node::return_node(Node::from(BinOp::new( + vec![Node::return_node(Node::from(BinOp::new( NumOp::Add, Local::from("a"), Local::from("b"), - )))]), + )))], ) .into()], ); @@ -152,7 +153,7 @@ fn check_arrow_epty_return() { FormalParameter::new("a", None, false), FormalParameter::new("b", None, false), ], - Node::statement_list(vec![Node::Return(None)]), + vec![Node::Return(None)], ) .into()], ); @@ -168,7 +169,7 @@ fn check_arrow_empty_return_semicolon_insertion() { FormalParameter::new("a", None, false), FormalParameter::new("b", None, false), ], - Node::statement_list(vec![Node::Return(None)]), + vec![Node::Return(None)], ) .into()], ); diff --git a/boa/src/syntax/parser/mod.rs b/boa/src/syntax/parser/mod.rs index bfbb6798cb2..79968cd7d3a 100644 --- a/boa/src/syntax/parser/mod.rs +++ b/boa/src/syntax/parser/mod.rs @@ -9,10 +9,7 @@ mod statement; mod tests; use self::error::{ParseError, ParseResult}; -use crate::syntax::ast::{ - node::{StatementList, VarDecl}, - token::Token, -}; +use crate::syntax::ast::{node::StatementList, Token}; use cursor::Cursor; /// Trait implemented by parsers. 
@@ -106,7 +103,7 @@ impl<'a> Parser<'a> { } /// Parse all expressions in the token array - pub fn parse_all(&mut self) -> (VarDecl, StatementList) { + pub fn parse_all(&mut self) -> Result { Script.parse(&mut self.cursor) } } @@ -121,16 +118,13 @@ impl<'a> Parser<'a> { pub struct Script; impl TokenParser for Script { - type Output = (VarDecl, StatementList); + type Output = StatementList; fn parse(self, cursor: &mut Cursor<'_>) -> Result { if cursor.peek(0).is_some() { ScriptBody.parse(cursor) } else { - Ok(( - Vec::new().into(), - StatementList::new(Vec::new(), Vec::new()), - )) + Ok(StatementList::from(Vec::new())) } } } @@ -145,7 +139,7 @@ impl TokenParser for Script { pub struct ScriptBody; impl TokenParser for ScriptBody { - type Output = (VarDecl, StatementList); + type Output = StatementList; fn parse(self, cursor: &mut Cursor<'_>) -> Result { self::statement::StatementList::new(false, false, false, false).parse(cursor) diff --git a/boa/src/syntax/parser/statement/block/mod.rs b/boa/src/syntax/parser/statement/block/mod.rs index bde56d93efc..1717834b8b6 100644 --- a/boa/src/syntax/parser/statement/block/mod.rs +++ b/boa/src/syntax/parser/statement/block/mod.rs @@ -12,7 +12,7 @@ mod tests; use super::StatementList; use crate::syntax::{ - ast::{node, punc::Punctuator, token::TokenKind}, + ast::{node, Punctuator, TokenKind}, parser::{AllowAwait, AllowReturn, AllowYield, Cursor, ParseError, TokenParser}, }; @@ -61,7 +61,7 @@ impl TokenParser for Block { fn parse(self, cursor: &mut Cursor<'_>) -> Result { cursor.expect(Punctuator::OpenBlock, "block")?; if let Some(tk) = cursor.peek(0) { - if tk.kind == TokenKind::Punctuator(Punctuator::CloseBlock) { + if tk.kind() == &TokenKind::Punctuator(Punctuator::CloseBlock) { cursor.next(); return Ok(node::Block::from(vec![])); } diff --git a/boa/src/syntax/parser/statement/block/tests.rs b/boa/src/syntax/parser/statement/block/tests.rs index 5004700cbd0..9812cf4cad1 100644 --- 
a/boa/src/syntax/parser/statement/block/tests.rs +++ b/boa/src/syntax/parser/statement/block/tests.rs @@ -2,7 +2,7 @@ use crate::syntax::{ ast::{ - node::{Assign, Block, Local, Node}, + node::{Assign, Block, FunctionDecl, Local, Node, VarDecl, VarDeclList}, op::UnaryOp, }, parser::tests::check_parser, @@ -27,7 +27,7 @@ fn non_empty() { a++; }", Block::from(vec![ - Node::var_decl(vec![("a".into(), Some(Node::const_node(10)))]), + VarDeclList::from(vec![VarDecl::new("a", Some(Node::const_node(10)))]).into(), Node::unary_op(UnaryOp::IncrementPost, Node::from(Local::from("a"))), ]), ); @@ -42,15 +42,17 @@ fn non_empty() { a++; }", Block::from(vec![ - Node::function_decl( - "hello", + FunctionDecl::new( + "hello".to_owned().into_boxed_str(), vec![], - Node::statement_list(vec![Node::return_node(Node::const_node(10))]), - ), - Node::var_decl(vec![( - "a".into(), + vec![Node::return_node(Node::const_node(10))], + ) + .into(), + VarDeclList::from(vec![VarDecl::new( + "a", Some(Node::call(Node::from(Local::from("hello")), vec![])), - )]), + )]) + .into(), Node::unary_op(UnaryOp::IncrementPost, Node::from(Local::from("a"))), ]), ); @@ -66,15 +68,17 @@ fn hoisting() { function hello() { return 10 } }", Block::from(vec![ - Node::function_decl( - "hello", + FunctionDecl::new( + "hello".to_owned().into_boxed_str(), vec![], - Node::statement_list(vec![Node::return_node(Node::const_node(10))]), - ), - Node::var_decl(vec![( - "a".into(), + vec![Node::return_node(Node::const_node(10))], + ) + .into(), + VarDeclList::from(vec![VarDecl::new( + "a", Some(Node::call(Node::from(Local::from("hello")), vec![])), - )]), + )]) + .into(), Node::unary_op(UnaryOp::IncrementPost, Node::from(Local::from("a"))), ]), ); @@ -87,8 +91,8 @@ fn hoisting() { var a; }", Block::from(vec![ - Node::var_decl(vec![("a".into(), None)]), - Node::from(Assign::new(Local::from("a"), Node::const_node(10))), + VarDeclList::from(vec![VarDecl::new("a", None)]).into(), + Assign::new(Local::from("a"), 
Node::const_node(10)).into(), Node::unary_op(UnaryOp::IncrementPost, Node::from(Local::from("a"))), ]), ); diff --git a/boa/src/syntax/parser/statement/break_stm/mod.rs b/boa/src/syntax/parser/statement/break_stm/mod.rs index 18ea6b26830..18aa1c8050d 100644 --- a/boa/src/syntax/parser/statement/break_stm/mod.rs +++ b/boa/src/syntax/parser/statement/break_stm/mod.rs @@ -12,7 +12,7 @@ mod tests; use super::LabelIdentifier; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{Keyword, Node, Punctuator, TokenKind}, parser::{AllowAwait, AllowYield, Cursor, ParseResult, TokenParser}, }; @@ -52,7 +52,7 @@ impl TokenParser for BreakStatement { let label = if let (true, tok) = cursor.peek_semicolon(false) { match tok { - Some(tok) if tok.kind == TokenKind::Punctuator(Punctuator::Semicolon) => { + Some(tok) if tok.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) => { let _ = cursor.next(); } _ => {} diff --git a/boa/src/syntax/parser/statement/continue_stm/mod.rs b/boa/src/syntax/parser/statement/continue_stm/mod.rs index 2ca22998c8d..89414faa2e9 100644 --- a/boa/src/syntax/parser/statement/continue_stm/mod.rs +++ b/boa/src/syntax/parser/statement/continue_stm/mod.rs @@ -12,7 +12,7 @@ mod tests; use super::LabelIdentifier; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{Keyword, Node, Punctuator, TokenKind}, parser::{AllowAwait, AllowYield, Cursor, ParseResult, TokenParser}, }; @@ -52,7 +52,7 @@ impl TokenParser for ContinueStatement { let label = if let (true, tok) = cursor.peek_semicolon(false) { match tok { - Some(tok) if tok.kind == TokenKind::Punctuator(Punctuator::Semicolon) => { + Some(tok) if tok.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) => { let _ = cursor.next(); } _ => {} diff --git a/boa/src/syntax/parser/statement/declaration/hoistable.rs b/boa/src/syntax/parser/statement/declaration/hoistable.rs index 691066844b9..44a942a908e 100644 
--- a/boa/src/syntax/parser/statement/declaration/hoistable.rs +++ b/boa/src/syntax/parser/statement/declaration/hoistable.rs @@ -6,10 +6,10 @@ //! [spec]: https://tc39.es/ecma262/#prod-HoistableDeclaration use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator}, + ast::{node::FunctionDecl, Keyword, Node, Punctuator}, parser::{ function::FormalParameters, function::FunctionBody, statement::BindingIdentifier, - AllowAwait, AllowDefault, AllowYield, Cursor, ParseResult, TokenParser, + AllowAwait, AllowDefault, AllowYield, Cursor, ParseError, ParseResult, TokenParser, }, }; @@ -23,12 +23,12 @@ use crate::syntax::{ pub(super) struct HoistableDeclaration { allow_yield: AllowYield, allow_await: AllowAwait, - allow_default: AllowDefault, + is_default: AllowDefault, } impl HoistableDeclaration { /// Creates a new `HoistableDeclaration` parser. - pub(super) fn new(allow_yield: Y, allow_await: A, allow_default: D) -> Self + pub(super) fn new(allow_yield: Y, allow_await: A, is_default: D) -> Self where Y: Into, A: Into, @@ -37,7 +37,7 @@ impl HoistableDeclaration { Self { allow_yield: allow_yield.into(), allow_await: allow_await.into(), - allow_default: allow_default.into(), + is_default: is_default.into(), } } } @@ -47,8 +47,9 @@ impl TokenParser for HoistableDeclaration { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { // TODO: check for generators and async functions + generators - FunctionDeclaration::new(self.allow_yield, self.allow_await, self.allow_default) + FunctionDeclaration::new(self.allow_yield, self.allow_await, self.is_default) .parse(cursor) + .map(Node::from) } } @@ -64,12 +65,12 @@ impl TokenParser for HoistableDeclaration { struct FunctionDeclaration { allow_yield: AllowYield, allow_await: AllowAwait, - allow_default: AllowDefault, + is_default: AllowDefault, } impl FunctionDeclaration { /// Creates a new `FunctionDeclaration` parser. 
- fn new(allow_yield: Y, allow_await: A, allow_default: D) -> Self + fn new(allow_yield: Y, allow_await: A, is_default: D) -> Self where Y: Into, A: Into, @@ -78,17 +79,18 @@ impl FunctionDeclaration { Self { allow_yield: allow_yield.into(), allow_await: allow_await.into(), - allow_default: allow_default.into(), + is_default: is_default.into(), } } } impl TokenParser for FunctionDeclaration { - type Output = Node; + type Output = FunctionDecl; - fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { + fn parse(self, cursor: &mut Cursor<'_>) -> Result { cursor.expect(Keyword::Function, "function declaration")?; + // TODO: If self.is_default, then this can be empty. let name = BindingIdentifier::new(self.allow_yield, self.allow_await).parse(cursor)?; cursor.expect(Punctuator::OpenParen, "function declaration")?; @@ -98,12 +100,10 @@ impl TokenParser for FunctionDeclaration { cursor.expect(Punctuator::CloseParen, "function declaration")?; cursor.expect(Punctuator::OpenBlock, "function declaration")?; - let body = FunctionBody::new(self.allow_yield, self.allow_await) - .parse(cursor) - .map(Node::statement_list)?; + let body = FunctionBody::new(self.allow_yield, self.allow_await).parse(cursor)?; cursor.expect(Punctuator::CloseBlock, "function declaration")?; - Ok(Node::function_decl(name, params, body)) + Ok(FunctionDecl::new(name, params, body)) } } diff --git a/boa/src/syntax/parser/statement/declaration/lexical.rs b/boa/src/syntax/parser/statement/declaration/lexical.rs index 6d7355e6603..27904da5c78 100644 --- a/boa/src/syntax/parser/statement/declaration/lexical.rs +++ b/boa/src/syntax/parser/statement/declaration/lexical.rs @@ -8,7 +8,7 @@ //! 
[spec]: https://tc39.es/ecma262/#sec-let-and-const-declarations use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{Keyword, Node, Punctuator, TokenKind}, parser::{ expression::Initializer, statement::BindingIdentifier, AllowAwait, AllowIn, AllowYield, Cursor, ParseError, ParseResult, TokenParser, @@ -50,7 +50,7 @@ impl TokenParser for LexicalDeclaration { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { let tok = cursor.next().ok_or(ParseError::AbruptEnd)?; - match tok.kind { + match tok.kind() { TokenKind::Keyword(Keyword::Const) => { BindingList::new(self.allow_in, self.allow_yield, self.allow_await, true) .parse(cursor) @@ -128,7 +128,7 @@ impl TokenParser for BindingList { match cursor.peek_semicolon(false) { (true, _) => break, - (false, Some(tk)) if tk.kind == TokenKind::Punctuator(Punctuator::Comma) => { + (false, Some(tk)) if tk.kind() == &TokenKind::Punctuator(Punctuator::Comma) => { let _ = cursor.next(); } _ => { diff --git a/boa/src/syntax/parser/statement/declaration/mod.rs b/boa/src/syntax/parser/statement/declaration/mod.rs index 11355289c3c..e7e9b463f2b 100644 --- a/boa/src/syntax/parser/statement/declaration/mod.rs +++ b/boa/src/syntax/parser/statement/declaration/mod.rs @@ -14,7 +14,7 @@ mod tests; use self::{hoistable::HoistableDeclaration, lexical::LexicalDeclaration}; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, token::TokenKind}, + ast::{Keyword, Node, TokenKind}, parser::{AllowAwait, AllowYield, Cursor, ParseError, ParseResult, TokenParser}, }; @@ -49,7 +49,7 @@ impl TokenParser for Declaration { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { let tok = cursor.peek(0).ok_or(ParseError::AbruptEnd)?; - match tok.kind { + match tok.kind() { TokenKind::Keyword(Keyword::Function) => { HoistableDeclaration::new(self.allow_yield, self.allow_await, false).parse(cursor) } diff --git a/boa/src/syntax/parser/statement/declaration/tests.rs 
b/boa/src/syntax/parser/statement/declaration/tests.rs index 1bb4154f6ad..1f95e7a6a04 100644 --- a/boa/src/syntax/parser/statement/declaration/tests.rs +++ b/boa/src/syntax/parser/statement/declaration/tests.rs @@ -1,5 +1,5 @@ use crate::syntax::{ - ast::node::Node, + ast::node::{FunctionDecl, Node, VarDecl, VarDeclList}, parser::tests::{check_invalid, check_parser}, }; @@ -8,10 +8,7 @@ use crate::syntax::{ fn var_declaration() { check_parser( "var a = 5;", - vec![Node::var_decl(vec![( - "a".into(), - Some(Node::const_node(5)), - )])], + vec![VarDeclList::from(vec![VarDecl::new("a", Some(Node::const_node(5)))]).into()], ); } @@ -20,18 +17,12 @@ fn var_declaration() { fn var_declaration_keywords() { check_parser( "var yield = 5;", - vec![Node::var_decl(vec![( - "yield".into(), - Some(Node::const_node(5)), - )])], + vec![VarDeclList::from(vec![VarDecl::new("yield", Some(Node::const_node(5)))]).into()], ); check_parser( "var await = 5;", - vec![Node::var_decl(vec![( - "await".into(), - Some(Node::const_node(5)), - )])], + vec![VarDeclList::from(vec![VarDecl::new("await", Some(Node::const_node(5)))]).into()], ); } @@ -40,17 +31,17 @@ fn var_declaration_keywords() { fn var_declaration_no_spaces() { check_parser( "var a=5;", - vec![Node::var_decl(vec![( - "a".into(), - Some(Node::const_node(5)), - )])], + vec![VarDeclList::from(vec![VarDecl::new("a", Some(Node::const_node(5)))]).into()], ); } /// Checks empty `var` declaration parsing. #[test] fn empty_var_declaration() { - check_parser("var a;", vec![Node::var_decl(vec![("a".into(), None)])]); + check_parser( + "var a;", + vec![VarDeclList::from(vec![VarDecl::new("a", None)]).into()], + ); } /// Checks multiple `var` declarations. 
@@ -58,11 +49,12 @@ fn empty_var_declaration() { fn multiple_var_declaration() { check_parser( "var a = 5, b, c = 6;", - vec![Node::var_decl(vec![ - ("a".into(), Some(Node::const_node(5))), - ("b".into(), None), - ("c".into(), Some(Node::const_node(6))), - ])], + vec![VarDeclList::from(vec![ + VarDecl::new("a", Some(Node::const_node(5))), + VarDecl::new("b", None), + VarDecl::new("c", Some(Node::const_node(6))), + ]) + .into()], ); } @@ -190,11 +182,7 @@ fn multiple_const_declaration() { fn function_declaration() { check_parser( "function hello() {}", - vec![Node::function_decl( - "hello", - vec![], - Node::statement_list(vec![]), - )], + vec![FunctionDecl::new(Box::from("hello"), vec![], vec![]).into()], ); } @@ -203,19 +191,11 @@ fn function_declaration() { fn function_declaration_keywords() { check_parser( "function yield() {}", - vec![Node::function_decl( - "yield", - vec![], - Node::statement_list(vec![]), - )], + vec![FunctionDecl::new(Box::from("yield"), vec![], vec![]).into()], ); check_parser( "function await() {}", - vec![Node::function_decl( - "await", - vec![], - Node::statement_list(vec![]), - )], + vec![FunctionDecl::new(Box::from("await"), vec![], vec![]).into()], ); } diff --git a/boa/src/syntax/parser/statement/if_stm/mod.rs b/boa/src/syntax/parser/statement/if_stm/mod.rs index 8caeab0101d..93ebcb934eb 100644 --- a/boa/src/syntax/parser/statement/if_stm/mod.rs +++ b/boa/src/syntax/parser/statement/if_stm/mod.rs @@ -3,7 +3,7 @@ mod tests; use super::Statement; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{Keyword, Node, Punctuator, TokenKind}, parser::{ expression::Expression, AllowAwait, AllowReturn, AllowYield, Cursor, ParseResult, TokenParser, @@ -58,7 +58,7 @@ impl TokenParser for IfStatement { Statement::new(self.allow_yield, self.allow_await, self.allow_return).parse(cursor)?; let else_stm = match cursor.next() { - Some(else_tok) if else_tok.kind == TokenKind::Keyword(Keyword::Else) => 
Some( + Some(else_tok) if else_tok.kind() == &TokenKind::Keyword(Keyword::Else) => Some( Statement::new(self.allow_yield, self.allow_await, self.allow_return) .parse(cursor)?, ), diff --git a/boa/src/syntax/parser/statement/iteration/do_while_statement.rs b/boa/src/syntax/parser/statement/iteration/do_while_statement.rs index d7ac6b88310..78b7e78a607 100644 --- a/boa/src/syntax/parser/statement/iteration/do_while_statement.rs +++ b/boa/src/syntax/parser/statement/iteration/do_while_statement.rs @@ -8,7 +8,7 @@ //! [spec]: https://tc39.es/ecma262/#sec-do-while-statement use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{Keyword, Node, Punctuator, TokenKind}, parser::{ expression::Expression, statement::Statement, AllowAwait, AllowReturn, AllowYield, Cursor, ParseError, ParseResult, TokenParser, @@ -61,7 +61,7 @@ impl TokenParser for DoWhileStatement { let next_token = cursor.peek(0).ok_or(ParseError::AbruptEnd)?; - if next_token.kind != TokenKind::Keyword(Keyword::While) { + if next_token.kind() != &TokenKind::Keyword(Keyword::While) { return Err(ParseError::Expected( vec![TokenKind::Keyword(Keyword::While)], next_token.clone(), diff --git a/boa/src/syntax/parser/statement/iteration/for_statement.rs b/boa/src/syntax/parser/statement/iteration/for_statement.rs index 4ee1ae2a529..1913f415443 100644 --- a/boa/src/syntax/parser/statement/iteration/for_statement.rs +++ b/boa/src/syntax/parser/statement/iteration/for_statement.rs @@ -9,10 +9,8 @@ use crate::syntax::{ ast::{ - keyword::Keyword, node::{Block, Node}, - punc::Punctuator, - token::TokenKind, + Keyword, Punctuator, TokenKind, }, parser::{ expression::Expression, @@ -64,10 +62,11 @@ impl TokenParser for ForStatement { cursor.expect(Keyword::For, "for statement")?; cursor.expect(Punctuator::OpenParen, "for statement")?; - let init = match cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind { + let init = match cursor.peek(0).ok_or(ParseError::AbruptEnd)?.kind() { 
TokenKind::Keyword(Keyword::Var) => Some( VariableDeclarationList::new(false, self.allow_yield, self.allow_await) - .parse(cursor)?, + .parse(cursor) + .map(Node::from)?, ), TokenKind::Keyword(Keyword::Let) | TokenKind::Keyword(Keyword::Const) => { Some(Declaration::new(self.allow_yield, self.allow_await).parse(cursor)?) diff --git a/boa/src/syntax/parser/statement/iteration/tests.rs b/boa/src/syntax/parser/statement/iteration/tests.rs index 27c3de66092..ad7e1eaa375 100644 --- a/boa/src/syntax/parser/statement/iteration/tests.rs +++ b/boa/src/syntax/parser/statement/iteration/tests.rs @@ -1,6 +1,8 @@ use crate::syntax::{ - ast::node::{BinOp, Block, Local, Node}, - ast::op::{AssignOp, CompOp, UnaryOp}, + ast::{ + node::{BinOp, Block, Local, Node, VarDecl, VarDeclList}, + op::{AssignOp, CompOp, UnaryOp}, + }, parser::tests::check_parser, }; @@ -29,7 +31,7 @@ fn check_do_while_semicolon_insertion() { r#"var i = 0; do {console.log("hello");} while(i++ < 10) console.log("end");"#, vec![ - Node::var_decl(vec![("i".into(), Some(Node::const_node(0)))]), + VarDeclList::from(vec![VarDecl::new("i", Some(Node::const_node(0)))]).into(), Node::do_while_loop( Node::from(Block::from(vec![Node::call( Node::get_const_field(Node::from(Local::from("console")), "log"), diff --git a/boa/src/syntax/parser/statement/iteration/while_statement.rs b/boa/src/syntax/parser/statement/iteration/while_statement.rs index 2b9f765ec0f..dd8040ae248 100644 --- a/boa/src/syntax/parser/statement/iteration/while_statement.rs +++ b/boa/src/syntax/parser/statement/iteration/while_statement.rs @@ -1,5 +1,5 @@ use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator}, + ast::{Keyword, Node, Punctuator}, parser::{ expression::Expression, statement::Statement, AllowAwait, AllowReturn, AllowYield, Cursor, ParseResult, TokenParser, diff --git a/boa/src/syntax/parser/statement/mod.rs b/boa/src/syntax/parser/statement/mod.rs index be222241c74..fd9d753a19a 100644 --- 
a/boa/src/syntax/parser/statement/mod.rs +++ b/boa/src/syntax/parser/statement/mod.rs @@ -36,7 +36,7 @@ use super::{ expression::Expression, AllowAwait, AllowReturn, AllowYield, Cursor, ParseError, ParseResult, TokenParser, }; -use crate::syntax::ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}; +use crate::syntax::ast::{node, Keyword, Node, Punctuator, TokenKind}; /// Statement parsing. /// @@ -93,13 +93,15 @@ impl TokenParser for Statement { // TODO: add BreakableStatement and divide Whiles, fors and so on to another place. let tok = cursor.peek(0).ok_or(ParseError::AbruptEnd)?; - match tok.kind { + match tok.kind() { TokenKind::Keyword(Keyword::If) => { IfStatement::new(self.allow_yield, self.allow_await, self.allow_return) .parse(cursor) } TokenKind::Keyword(Keyword::Var) => { - VariableStatement::new(self.allow_yield, self.allow_await).parse(cursor) + VariableStatement::new(self.allow_yield, self.allow_await) + .parse(cursor) + .map(Node::from) } TokenKind::Keyword(Keyword::While) => { WhileStatement::new(self.allow_yield, self.allow_await, self.allow_return) @@ -190,14 +192,14 @@ impl StatementList { } impl TokenParser for StatementList { - type Output = (VarDecl, StatementList); + type Output = node::StatementList; fn parse(self, cursor: &mut Cursor<'_>) -> Result { let mut items = Vec::new(); loop { match cursor.peek(0) { - Some(token) if token.kind == TokenKind::Punctuator(Punctuator::CloseBlock) => { + Some(token) if token.kind() == &TokenKind::Punctuator(Punctuator::CloseBlock) => { if self.break_when_closingbrase { break; } else { @@ -225,7 +227,7 @@ impl TokenParser for StatementList { items.sort_by(Node::hoistable_order); - Ok(items) + Ok(items.into()) } } @@ -268,7 +270,7 @@ impl TokenParser for StatementListItem { fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { let tok = cursor.peek(0).ok_or(ParseError::AbruptEnd)?; - match tok.kind { + match tok.kind() { TokenKind::Keyword(Keyword::Function) | 
TokenKind::Keyword(Keyword::Const) | TokenKind::Keyword(Keyword::Let) => { @@ -364,7 +366,7 @@ impl TokenParser for BindingIdentifier { let next_token = cursor.next().ok_or(ParseError::AbruptEnd)?; - match next_token.kind { + match next_token.kind() { TokenKind::Identifier(ref s) => Ok(s.as_str().into()), TokenKind::Keyword(k @ Keyword::Yield) if !self.allow_yield.0 => Ok(k.as_str().into()), TokenKind::Keyword(k @ Keyword::Await) if !self.allow_await.0 => Ok(k.as_str().into()), diff --git a/boa/src/syntax/parser/statement/return_stm/mod.rs b/boa/src/syntax/parser/statement/return_stm/mod.rs index 54a7bf5cce8..e2167515c37 100644 --- a/boa/src/syntax/parser/statement/return_stm/mod.rs +++ b/boa/src/syntax/parser/statement/return_stm/mod.rs @@ -2,7 +2,7 @@ mod tests; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{Keyword, Node, Punctuator, TokenKind}, parser::{expression::Expression, AllowAwait, AllowYield, Cursor, ParseResult, TokenParser}, }; @@ -43,8 +43,8 @@ impl TokenParser for ReturnStatement { if let (true, tok) = cursor.peek_semicolon(false) { match tok { Some(tok) - if tok.kind == TokenKind::Punctuator(Punctuator::Semicolon) - || tok.kind == TokenKind::LineTerminator => + if tok.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) + || tok.kind() == &TokenKind::LineTerminator => { let _ = cursor.next(); } diff --git a/boa/src/syntax/parser/statement/switch/mod.rs b/boa/src/syntax/parser/statement/switch/mod.rs index 1c187b7f529..c8416149a03 100644 --- a/boa/src/syntax/parser/statement/switch/mod.rs +++ b/boa/src/syntax/parser/statement/switch/mod.rs @@ -2,7 +2,7 @@ mod tests; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator}, + ast::{Keyword, Node, Punctuator}, parser::{ expression::Expression, AllowAwait, AllowReturn, AllowYield, Cursor, ParseError, ParseResult, TokenParser, diff --git a/boa/src/syntax/parser/statement/throw/mod.rs 
b/boa/src/syntax/parser/statement/throw/mod.rs index bbfbfba9db8..711ff56587d 100644 --- a/boa/src/syntax/parser/statement/throw/mod.rs +++ b/boa/src/syntax/parser/statement/throw/mod.rs @@ -2,7 +2,7 @@ mod tests; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{Keyword, Node, Punctuator, TokenKind}, parser::{expression::Expression, AllowAwait, AllowYield, Cursor, ParseResult, TokenParser}, }; @@ -44,7 +44,7 @@ impl TokenParser for ThrowStatement { let expr = Expression::new(true, self.allow_yield, self.allow_await).parse(cursor)?; if let Some(tok) = cursor.peek(0) { - if tok.kind == TokenKind::Punctuator(Punctuator::Semicolon) { + if tok.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) { let _ = cursor.next(); } } diff --git a/boa/src/syntax/parser/statement/throw/tests.rs b/boa/src/syntax/parser/statement/throw/tests.rs index f0919d18701..9557d46fd35 100644 --- a/boa/src/syntax/parser/statement/throw/tests.rs +++ b/boa/src/syntax/parser/statement/throw/tests.rs @@ -1,4 +1,4 @@ -use crate::syntax::{ast::node::Node, parser::tests::check_parser}; +use crate::syntax::{ast::Node, parser::tests::check_parser}; #[test] fn check_throw_parsing() { diff --git a/boa/src/syntax/parser/statement/try_stm/catch.rs b/boa/src/syntax/parser/statement/try_stm/catch.rs index 717a6539906..7fac5a05527 100644 --- a/boa/src/syntax/parser/statement/try_stm/catch.rs +++ b/boa/src/syntax/parser/statement/try_stm/catch.rs @@ -1,8 +1,7 @@ use crate::syntax::{ ast::{ - keyword::Keyword, node::{self, Local}, - punc::Punctuator, + Keyword, Punctuator, }, parser::{ statement::{block::Block, BindingIdentifier}, diff --git a/boa/src/syntax/parser/statement/try_stm/finally.rs b/boa/src/syntax/parser/statement/try_stm/finally.rs index d187c3c1381..ff9e4d65c23 100644 --- a/boa/src/syntax/parser/statement/try_stm/finally.rs +++ b/boa/src/syntax/parser/statement/try_stm/finally.rs @@ -1,5 +1,5 @@ use crate::syntax::{ - 
ast::{keyword::Keyword, node}, + ast::{node, Keyword}, parser::{ statement::block::Block, AllowAwait, AllowReturn, AllowYield, Cursor, ParseError, TokenParser, diff --git a/boa/src/syntax/parser/statement/try_stm/mod.rs b/boa/src/syntax/parser/statement/try_stm/mod.rs index 8bc7c7ed232..c80b1c130e8 100644 --- a/boa/src/syntax/parser/statement/try_stm/mod.rs +++ b/boa/src/syntax/parser/statement/try_stm/mod.rs @@ -8,7 +8,7 @@ use self::catch::Catch; use self::finally::Finally; use super::block::Block; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, token::TokenKind}, + ast::{Keyword, Node, TokenKind}, parser::{AllowAwait, AllowReturn, AllowYield, Cursor, ParseError, ParseResult, TokenParser}, }; @@ -55,8 +55,8 @@ impl TokenParser for TryStatement { let next_token = cursor.peek(0).ok_or(ParseError::AbruptEnd)?; - if next_token.kind != TokenKind::Keyword(Keyword::Catch) - && next_token.kind != TokenKind::Keyword(Keyword::Finally) + if next_token.kind() != &TokenKind::Keyword(Keyword::Catch) + && next_token.kind() != &TokenKind::Keyword(Keyword::Finally) { return Err(ParseError::Expected( vec![ @@ -68,7 +68,7 @@ impl TokenParser for TryStatement { )); } - let catch = if next_token.kind == TokenKind::Keyword(Keyword::Catch) { + let catch = if next_token.kind() == &TokenKind::Keyword(Keyword::Catch) { Some(Catch::new(self.allow_yield, self.allow_await, self.allow_return).parse(cursor)?) 
} else { None @@ -76,7 +76,7 @@ impl TokenParser for TryStatement { let next_token = cursor.peek(0); let finally_block = match next_token { - Some(token) => match token.kind { + Some(token) => match token.kind() { TokenKind::Keyword(Keyword::Finally) => Some( Finally::new(self.allow_yield, self.allow_await, self.allow_return) .parse(cursor)?, diff --git a/boa/src/syntax/parser/statement/try_stm/tests.rs b/boa/src/syntax/parser/statement/try_stm/tests.rs index 3cf72568beb..a95dd241d22 100644 --- a/boa/src/syntax/parser/statement/try_stm/tests.rs +++ b/boa/src/syntax/parser/statement/try_stm/tests.rs @@ -1,5 +1,5 @@ use crate::syntax::{ - ast::node::{Block, Local, Node}, + ast::node::{Block, Local, Node, VarDecl, VarDeclList}, parser::tests::{check_invalid, check_parser}, }; @@ -20,10 +20,11 @@ fn check_inline_with_var_decl_inside_try() { check_parser( "try { var x = 1; } catch(e) {}", vec![Node::try_node( - Block::from(vec![Node::var_decl(vec![( - "x".into(), + Block::from(vec![VarDeclList::from(vec![VarDecl::new( + "x", Some(Node::const_node(1)), - )])]), + )]) + .into()]), Some((Some(Local::from("e")), Block::from(vec![]))), None, )], @@ -35,16 +36,18 @@ fn check_inline_with_var_decl_inside_catch() { check_parser( "try { var x = 1; } catch(e) { var x = 1; }", vec![Node::try_node( - Block::from(vec![Node::var_decl(vec![( - "x".into(), + Block::from(vec![VarDeclList::from(vec![VarDecl::new( + "x", Some(Node::const_node(1)), - )])]), + )]) + .into()]), Some(( Some(Local::from("e")), - Block::from(vec![Node::var_decl(vec![( - "x".into(), + Block::from(vec![VarDeclList::from(vec![VarDecl::new( + "x", Some(Node::const_node(1)), - )])]), + )]) + .into()]), )), None, )], @@ -82,10 +85,11 @@ fn check_inline_with_empty_try_var_decl_in_finally() { vec![Node::try_node( Block::from(vec![]), None, - Block::from(vec![Node::var_decl(vec![( - "x".into(), + Block::from(vec![VarDeclList::from(vec![VarDecl::new( + "x", Some(Node::const_node(1)), - )])]), + )]) + .into()]), )], ); } 
@@ -98,10 +102,11 @@ fn check_inline_empty_try_paramless_catch() { Block::from(vec![]), Some(( None, - Block::from(vec![Node::var_decl(vec![( - "x".into(), + Block::from(vec![VarDeclList::from(vec![VarDecl::new( + "x", Some(Node::const_node(1)), - )])]), + )]) + .into()]), )), None, )], diff --git a/boa/src/syntax/parser/statement/variable.rs b/boa/src/syntax/parser/statement/variable.rs index bd045a22a50..75be78e53ac 100644 --- a/boa/src/syntax/parser/statement/variable.rs +++ b/boa/src/syntax/parser/statement/variable.rs @@ -1,9 +1,12 @@ // use super::lexical_declaration_continuation; use crate::syntax::{ - ast::{keyword::Keyword, node::Node, punc::Punctuator, token::TokenKind}, + ast::{ + node::{VarDecl, VarDeclList}, + Keyword, Punctuator, TokenKind, + }, parser::{ expression::Initializer, statement::BindingIdentifier, AllowAwait, AllowIn, AllowYield, - Cursor, ParseError, ParseResult, TokenParser, + Cursor, ParseError, TokenParser, }, }; @@ -38,9 +41,9 @@ impl VariableStatement { } impl TokenParser for VariableStatement { - type Output = Node; + type Output = VarDeclList; - fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { + fn parse(self, cursor: &mut Cursor<'_>) -> Result { cursor.expect(Keyword::Var, "variable statement")?; let decl_list = @@ -88,9 +91,9 @@ impl VariableDeclarationList { } impl TokenParser for VariableDeclarationList { - type Output = Node; + type Output = VarDeclList; - fn parse(self, cursor: &mut Cursor<'_>) -> ParseResult { + fn parse(self, cursor: &mut Cursor<'_>) -> Result { let mut list = Vec::new(); loop { @@ -101,7 +104,7 @@ impl TokenParser for VariableDeclarationList { match cursor.peek_semicolon(false) { (true, _) => break, - (false, Some(tk)) if tk.kind == TokenKind::Punctuator(Punctuator::Comma) => { + (false, Some(tk)) if tk.kind() == &TokenKind::Punctuator(Punctuator::Comma) => { let _ = cursor.next(); } _ => { @@ -117,7 +120,7 @@ impl TokenParser for VariableDeclarationList { } } - Ok(Node::var_decl(list)) + 
Ok(VarDeclList::from(list)) } } @@ -151,7 +154,7 @@ impl VariableDeclaration { } impl TokenParser for VariableDeclaration { - type Output = (Box, Option); + type Output = VarDecl; fn parse(self, cursor: &mut Cursor<'_>) -> Result { // TODO: BindingPattern @@ -161,6 +164,6 @@ impl TokenParser for VariableDeclaration { let ident = Initializer::new(self.allow_in, self.allow_yield, self.allow_await).try_parse(cursor); - Ok((name, ident)) + Ok(VarDecl::new(name, ident)) } } diff --git a/boa/src/syntax/parser/tests.rs b/boa/src/syntax/parser/tests.rs index d34dd0e2d73..fb7ecd19c68 100644 --- a/boa/src/syntax/parser/tests.rs +++ b/boa/src/syntax/parser/tests.rs @@ -2,7 +2,7 @@ use super::Parser; use crate::syntax::{ - ast::node::{Assign, BinOp, Local, Node}, + ast::node::{Assign, BinOp, FunctionDecl, Local, Node, StatementList, VarDecl, VarDeclList}, ast::op::{NumOp, UnaryOp}, lexer::Lexer, }; @@ -21,7 +21,7 @@ where Parser::new(&lexer.tokens) .parse_all() .expect("failed to parse"), - Node::statement_list(expr) + StatementList::from(expr) ); } @@ -70,15 +70,17 @@ fn hoisting() { function hello() { return 10 }", vec![ - Node::function_decl( - "hello", + FunctionDecl::new( + Box::from("hello"), vec![], - Node::statement_list(vec![Node::return_node(Node::const_node(10))]), - ), - Node::var_decl(vec![( - "a".into(), + vec![Node::return_node(Node::const_node(10))], + ) + .into(), + VarDeclList::from(vec![VarDecl::new( + "a", Some(Node::call(Node::from(Local::from("hello")), vec![])), - )]), + )]) + .into(), Node::unary_op(UnaryOp::IncrementPost, Node::from(Local::from("a"))), ], ); @@ -90,7 +92,7 @@ fn hoisting() { var a;", vec![ - Node::var_decl(vec![("a".into(), None)]), + VarDeclList::from(vec![VarDecl::new("a", None)]).into(), Node::from(Assign::new(Local::from("a"), Node::const_node(10))), Node::unary_op(UnaryOp::IncrementPost, Node::from(Local::from("a"))), ],