feat: vendor the Boa JavaScript parser (`boa_parser`) sources as an external dependency
This commit is contained in:
205
javascript-engine/external/boa/boa_parser/src/error.rs
vendored
Normal file
205
javascript-engine/external/boa/boa_parser/src/error.rs
vendored
Normal file
@@ -0,0 +1,205 @@
|
||||
//! Error and result implementation for the parser.
|
||||
|
||||
use crate::lexer::Error as LexError;
|
||||
use boa_ast::{Position, Span};
|
||||
use std::fmt;
|
||||
|
||||
/// Result of a parsing operation.
pub type ParseResult<T> = Result<T, Error>;

/// Extension trait used to attach a static context string to a parse error.
///
/// Implemented for [`ParseResult`] so parsing code can chain `.context("...")`
/// onto fallible calls instead of unwrapping and re-wrapping the `Result`.
pub(crate) trait ErrorContext {
    /// Returns `self` with its error (if any) annotated with `context`.
    fn context(self, context: &'static str) -> Self;
}
|
||||
|
||||
impl<T> ErrorContext for ParseResult<T> {
    /// Attaches `context` to the error variant, leaving a successful result untouched.
    fn context(self, context: &'static str) -> Self {
        match self {
            Ok(value) => Ok(value),
            Err(error) => Err(error.context(context)),
        }
    }
}
|
||||
|
||||
impl From<LexError> for Error {
    /// Wraps a lexer error in the parser's [`Error::Lex`] variant.
    #[inline]
    fn from(e: LexError) -> Self {
        Self::lex(e)
    }
}
|
||||
|
||||
/// An enum which represents errors encountered during parsing an expression.
#[derive(Debug)]
pub enum Error {
    /// When it expected a certain kind of token, but got another as part of something.
    Expected {
        /// The token(s) that were expected.
        expected: Box<[String]>,

        /// The token that was found instead.
        found: Box<str>,

        /// The parsing context in which the error occurred.
        context: &'static str,

        /// Position of the source code where the error occurred.
        span: Span,
    },

    /// When a token is unexpected.
    Unexpected {
        /// An optional message describing why the token is unexpected.
        message: Option<&'static str>,

        /// The token that was not expected.
        found: Box<str>,

        /// Position of the source code where the error occurred.
        span: Span,
    },

    /// When there is an abrupt end to the parsing.
    AbruptEnd,

    /// A lexing error.
    Lex {
        /// The error that occurred during lexing.
        err: LexError,
    },

    /// Catch-all general error.
    General {
        /// The error message.
        message: &'static str,

        /// Position of the source code where the error occurred.
        position: Position,
    },
}
|
||||
|
||||
impl Error {
|
||||
/// Changes the context of the error, if any.
|
||||
fn context(self, new_context: &'static str) -> Self {
|
||||
match self {
|
||||
Self::Expected {
|
||||
expected,
|
||||
found,
|
||||
span,
|
||||
..
|
||||
} => Self::expected(expected, found, span, new_context),
|
||||
e => e,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates an `Expected` parsing error.
|
||||
pub(crate) fn expected<E, F>(expected: E, found: F, span: Span, context: &'static str) -> Self
|
||||
where
|
||||
E: Into<Box<[String]>>,
|
||||
F: Into<Box<str>>,
|
||||
{
|
||||
Self::Expected {
|
||||
expected: expected.into(),
|
||||
found: found.into(),
|
||||
span,
|
||||
context,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates an `Expected` parsing error.
|
||||
pub(crate) fn unexpected<F, C>(found: F, span: Span, message: C) -> Self
|
||||
where
|
||||
F: Into<Box<str>>,
|
||||
C: Into<Option<&'static str>>,
|
||||
{
|
||||
Self::Unexpected {
|
||||
found: found.into(),
|
||||
span,
|
||||
message: message.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a "general" parsing error.
|
||||
pub(crate) const fn general(message: &'static str, position: Position) -> Self {
|
||||
Self::General { message, position }
|
||||
}
|
||||
|
||||
/// Creates a "general" parsing error with the specific error message for a wrong function declaration in non-strict mode.
|
||||
pub(crate) const fn wrong_function_declaration_non_strict(position: Position) -> Self {
|
||||
Self::General {
|
||||
message: "In non-strict mode code, functions can only be declared at top level, inside a block, or as the body of an if statement.",
|
||||
position
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a "general" parsing error with the specific error message for a wrong function declaration with label.
|
||||
pub(crate) const fn wrong_labelled_function_declaration(position: Position) -> Self {
|
||||
Self::General {
|
||||
message: "Labelled functions can only be declared at top level or inside a block",
|
||||
position,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a parsing error from a lexing error.
|
||||
pub(crate) const fn lex(e: LexError) -> Self {
|
||||
Self::Lex { err: e }
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
    /// Renders the error as a human-readable message including source position.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Expected {
                expected,
                found,
                span,
                context,
            } => write!(
                f,
                "expected {}, got '{found}' in {context} at line {}, col {}",
                // Single expectation: quote the one token. Multiple: build an
                // "'a', 'b' or 'c'" style list.
                if expected.len() == 1 {
                    format!(
                        "token '{}'",
                        expected.first().expect("already checked that length is 1")
                    )
                } else {
                    format!(
                        "one of {}",
                        expected
                            .iter()
                            .enumerate()
                            .map(|(i, t)| {
                                format!(
                                    "{}'{t}'",
                                    // Separator BEFORE each item: nothing for the
                                    // first, " or " before the last, ", " otherwise.
                                    if i == 0 {
                                        ""
                                    } else if i == expected.len() - 1 {
                                        " or "
                                    } else {
                                        ", "
                                    },
                                )
                            })
                            .collect::<String>()
                    )
                },
                span.start().line_number(),
                span.start().column_number()
            ),
            Self::Unexpected {
                found,
                span,
                message,
            } => write!(
                f,
                "unexpected token '{found}'{} at line {}, col {}",
                // The optional message is rendered as ", <message>" when present.
                message.map_or_else(String::new, |m| format!(", {m}")),
                span.start().line_number(),
                span.start().column_number()
            ),
            Self::AbruptEnd => f.write_str("abrupt end"),
            Self::General { message, position } => write!(
                f,
                "{message} at line {}, col {}",
                position.line_number(),
                position.column_number()
            ),
            // Delegate to the lexer error's own Display implementation.
            Self::Lex { err } => fmt::Display::fmt(err, f),
        }
    }
}
|
||||
135
javascript-engine/external/boa/boa_parser/src/lexer/comment.rs
vendored
Normal file
135
javascript-engine/external/boa/boa_parser/src/lexer/comment.rs
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
//! Boa's lexing for ECMAScript comments.
|
||||
|
||||
use crate::lexer::{Cursor, Error, Token, TokenKind, Tokenizer};
|
||||
use boa_ast::{Position, Span};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Lexes a single line comment.
|
||||
///
|
||||
/// Assumes that the initial '//' is already consumed.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript reference][spec]
|
||||
/// - [MDN documentation][mdn]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-comments
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar
|
||||
pub(super) struct SingleLineComment;
|
||||
|
||||
impl<R> Tokenizer<R> for SingleLineComment {
|
||||
fn lex(
|
||||
&mut self,
|
||||
cursor: &mut Cursor<R>,
|
||||
start_pos: Position,
|
||||
_interner: &mut Interner,
|
||||
) -> Result<Token, Error>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
let _timer = Profiler::global().start_event("SingleLineComment", "Lexing");
|
||||
|
||||
// Skip either to the end of the line or to the end of the input
|
||||
while let Some(ch) = cursor.peek_char()? {
|
||||
let tried_ch = char::try_from(ch);
|
||||
match tried_ch {
|
||||
Ok(c) if c == '\r' || c == '\n' || c == '\u{2028}' || c == '\u{2029}' => break,
|
||||
_ => {}
|
||||
};
|
||||
cursor.next_char().expect("Comment character vanished");
|
||||
}
|
||||
Ok(Token::new(
|
||||
TokenKind::Comment,
|
||||
Span::new(start_pos, cursor.pos()),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// Lexes a block (multi-line) comment.
|
||||
///
|
||||
/// Assumes that the initial '/*' is already consumed.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript reference][spec]
|
||||
/// - [MDN documentation][mdn]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-comments
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Lexical_grammar
|
||||
pub(super) struct MultiLineComment;
|
||||
|
||||
impl<R> Tokenizer<R> for MultiLineComment {
|
||||
fn lex(
|
||||
&mut self,
|
||||
cursor: &mut Cursor<R>,
|
||||
start_pos: Position,
|
||||
_interner: &mut Interner,
|
||||
) -> Result<Token, Error>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
let _timer = Profiler::global().start_event("MultiLineComment", "Lexing");
|
||||
|
||||
let mut new_line = false;
|
||||
while let Some(ch) = cursor.next_char()? {
|
||||
let tried_ch = char::try_from(ch);
|
||||
match tried_ch {
|
||||
Ok(c) if c == '*' && cursor.next_is(b'/')? => {
|
||||
return Ok(Token::new(
|
||||
if new_line {
|
||||
TokenKind::LineTerminator
|
||||
} else {
|
||||
TokenKind::Comment
|
||||
},
|
||||
Span::new(start_pos, cursor.pos()),
|
||||
))
|
||||
}
|
||||
Ok(c) if c == '\r' || c == '\n' || c == '\u{2028}' || c == '\u{2029}' => {
|
||||
new_line = true;
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
|
||||
Err(Error::syntax(
|
||||
"unterminated multiline comment",
|
||||
cursor.pos(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
///Lexes a first line Hashbang comment
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript reference][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-ecmascript-language-lexical-grammar
|
||||
|
||||
pub(super) struct HashbangComment;
|
||||
|
||||
impl<R> Tokenizer<R> for HashbangComment {
|
||||
fn lex(
|
||||
&mut self,
|
||||
cursor: &mut Cursor<R>,
|
||||
start_pos: Position,
|
||||
_interner: &mut Interner,
|
||||
) -> Result<Token, Error>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
let _timer = Profiler::global().start_event("Hashbang", "Lexing");
|
||||
|
||||
while let Some(ch) = cursor.next_char()? {
|
||||
let tried_ch = char::try_from(ch);
|
||||
match tried_ch {
|
||||
Ok(c) if c == '\r' || c == '\n' || c == '\u{2028}' || c == '\u{2029}' => break,
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(Token::new(
|
||||
TokenKind::Comment,
|
||||
Span::new(start_pos, cursor.pos()),
|
||||
))
|
||||
}
|
||||
}
|
||||
491
javascript-engine/external/boa/boa_parser/src/lexer/cursor.rs
vendored
Normal file
491
javascript-engine/external/boa/boa_parser/src/lexer/cursor.rs
vendored
Normal file
@@ -0,0 +1,491 @@
|
||||
//! Boa's lexer cursor that manages the input byte stream.
|
||||
use boa_ast::Position;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::{self, Bytes, Error, ErrorKind, Read};
|
||||
|
||||
/// Cursor over the source code.
#[derive(Debug)]
pub(super) struct Cursor<R> {
    // Byte iterator over the reader, with a small lookahead buffer.
    iter: InnerIter<R>,
    // Current line/column position within the source.
    pos: Position,
    // Whether strict-mode lexing rules are currently active.
    strict_mode: bool,
}
|
||||
|
||||
impl<R> Cursor<R> {
|
||||
/// Gets the current position of the cursor in the source code.
|
||||
pub(super) const fn pos(&self) -> Position {
|
||||
self.pos
|
||||
}
|
||||
|
||||
/// Advances the position to the next column.
|
||||
pub(super) fn next_column(&mut self) {
|
||||
let current_line = self.pos.line_number();
|
||||
let next_column = self.pos.column_number() + 1;
|
||||
self.pos = Position::new(current_line, next_column);
|
||||
}
|
||||
|
||||
/// Advances the position to the next line.
|
||||
fn next_line(&mut self) {
|
||||
let next_line = self.pos.line_number() + 1;
|
||||
self.pos = Position::new(next_line, 1);
|
||||
}
|
||||
|
||||
/// Returns if strict mode is currently active.
|
||||
pub(super) const fn strict_mode(&self) -> bool {
|
||||
self.strict_mode
|
||||
}
|
||||
|
||||
/// Sets the current strict mode.
|
||||
pub(super) fn set_strict_mode(&mut self, strict_mode: bool) {
|
||||
self.strict_mode = strict_mode;
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> Cursor<R>
where
    R: Read,
{
    /// Creates a new Lexer cursor starting at line 1, column 1.
    pub(super) fn new(inner: R) -> Self {
        Self {
            iter: InnerIter::new(inner.bytes()),
            pos: Position::new(1, 1),
            strict_mode: false,
        }
    }

    /// Creates a new Lexer cursor with an initial position.
    pub(super) fn with_position(inner: R, pos: Position) -> Self {
        Self {
            iter: InnerIter::new(inner.bytes()),
            pos,
            strict_mode: false,
        }
    }

    /// Peeks the next byte without consuming it.
    pub(super) fn peek(&mut self) -> Result<Option<u8>, Error> {
        let _timer = Profiler::global().start_event("cursor::peek()", "Lexing");

        self.iter.peek_byte()
    }

    /// Peeks the next n bytes, the maximum number of peeked bytes is 4 (n <= 4).
    pub(super) fn peek_n(&mut self, n: u8) -> Result<&[u8], Error> {
        let _timer = Profiler::global().start_event("cursor::peek_n()", "Lexing");

        self.iter.peek_n_bytes(n)
    }

    /// Peeks the next UTF-8 character as a u32 code point, without consuming it.
    pub(super) fn peek_char(&mut self) -> Result<Option<u32>, Error> {
        let _timer = Profiler::global().start_event("cursor::peek_char()", "Lexing");

        self.iter.peek_char()
    }

    /// Compares the byte passed in to the next byte, if they match true is returned
    /// and the buffer is incremented.
    pub(super) fn next_is(&mut self, byte: u8) -> io::Result<bool> {
        let _timer = Profiler::global().start_event("cursor::next_is()", "Lexing");

        Ok(match self.peek()? {
            Some(next) if next == byte => {
                // Consume the matching byte (result intentionally discarded).
                let _ = self.next_byte()?;
                true
            }
            _ => false,
        })
    }

    /// Applies the predicate to the next character and returns the result.
    /// Returns false if the next character is not a valid ascii or there is no next character.
    /// Otherwise returns the result from the predicate on the ascii in char.
    ///
    /// The buffer is not incremented.
    pub(super) fn next_is_ascii_pred<F>(&mut self, pred: &F) -> io::Result<bool>
    where
        F: Fn(char) -> bool,
    {
        let _timer = Profiler::global().start_event("cursor::next_is_ascii_pred()", "Lexing");

        Ok(match self.peek()? {
            // Only bytes in the ASCII range are passed to the predicate.
            Some(byte) if (0..=0x7F).contains(&byte) => pred(char::from(byte)),
            Some(_) | None => false,
        })
    }

    /// Applies the predicate to the next UTF-8 character and returns the result.
    /// Returns false if there is no next character, otherwise returns the result from the
    /// predicate on the code point.
    ///
    /// The buffer is not incremented.
    #[allow(dead_code)]
    pub(super) fn next_is_char_pred<F>(&mut self, pred: &F) -> io::Result<bool>
    where
        F: Fn(u32) -> bool,
    {
        let _timer = Profiler::global().start_event("cursor::next_is_char_pred()", "Lexing");

        Ok(self.peek_char()?.map_or(false, pred))
    }

    /// Fills the buffer with all bytes until the stop byte is found.
    /// Returns error when reaching the end of the buffer.
    ///
    /// Note that all bytes up until the stop byte are added to the buffer, including the
    /// byte right before. The stop byte itself is consumed but not pushed.
    pub(super) fn take_until(&mut self, stop: u8, buf: &mut Vec<u8>) -> io::Result<()> {
        let _timer = Profiler::global().start_event("cursor::take_until()", "Lexing");

        loop {
            if self.next_is(stop)? {
                return Ok(());
            } else if let Some(byte) = self.next_byte()? {
                buf.push(byte);
            } else {
                // Ran out of input before seeing `stop`.
                return Err(io::Error::new(
                    ErrorKind::UnexpectedEof,
                    format!("Unexpected end of file when looking for character {stop}"),
                ));
            }
        }
    }

    /// Fills the buffer with characters until the first ascii character for which the
    /// predicate (pred) is false.
    /// It also stops when the next character is not an ascii or there is no next character.
    ///
    /// Note that all characters up until the stop character are added to the buffer,
    /// including the character right before.
    pub(super) fn take_while_ascii_pred<F>(&mut self, buf: &mut Vec<u8>, pred: &F) -> io::Result<()>
    where
        F: Fn(char) -> bool,
    {
        let _timer = Profiler::global().start_event("cursor::take_while_ascii_pred()", "Lexing");

        loop {
            if !self.next_is_ascii_pred(pred)? {
                return Ok(());
            } else if let Some(byte) = self.next_byte()? {
                buf.push(byte);
            } else {
                // next_is_pred will return false if the next value is None so the None case
                // should already be handled.
                unreachable!();
            }
        }
    }

    /// Fills the buffer with characters until the first character for which the predicate
    /// (pred) is false.
    /// It also stops when there is no next character.
    ///
    /// Note that all characters up until the stop character are added to the buffer,
    /// including the character right before.
    #[allow(dead_code)]
    pub(super) fn take_while_char_pred<F>(&mut self, buf: &mut Vec<u8>, pred: &F) -> io::Result<()>
    where
        F: Fn(u32) -> bool,
    {
        let _timer = Profiler::global().start_event("cursor::take_while_char_pred()", "Lexing");

        loop {
            if !self.next_is_char_pred(pred)? {
                return Ok(());
            } else if let Some(ch) = self.peek_char()? {
                // Push every byte of the (possibly multi-byte) character.
                for _ in 0..utf8_len(ch) {
                    buf.push(
                        self.next_byte()?
                            .expect("already checked that the next character exists"),
                    );
                }
            } else {
                // next_is_pred will return false if the next value is None so the None case
                // should already be handled.
                unreachable!();
            }
        }
    }

    /// It will fill the buffer with bytes.
    ///
    /// This expects for the buffer to be fully filled. If it's not, it will fail with an
    /// `UnexpectedEof` I/O error.
    pub(super) fn fill_bytes(&mut self, buf: &mut [u8]) -> io::Result<()> {
        let _timer = Profiler::global().start_event("cursor::fill_bytes()", "Lexing");

        self.iter.fill_bytes(buf)
    }

    /// Retrieves the next byte, updating the line/column position for line
    /// terminators and character boundaries.
    pub(crate) fn next_byte(&mut self) -> Result<Option<u8>, Error> {
        let _timer = Profiler::global().start_event("cursor::next_byte()", "Lexing");

        let byte = self.iter.next_byte()?;

        match byte {
            Some(b'\r') => {
                // Try to take a newline if it's next, for windows "\r\n" newlines
                // Otherwise, treat as a Mac OS9 bare '\r' newline
                if self.peek()? == Some(b'\n') {
                    let _next = self.iter.next_byte();
                }
                self.next_line();
            }
            Some(b'\n') => self.next_line(),
            Some(0xE2) => {
                // Try to match '\u{2028}' (e2 80 a8) and '\u{2029}' (e2 80 a9)
                let next_bytes = self.peek_n(2)?;
                if next_bytes == [0x80, 0xA8] || next_bytes == [0x80, 0xA9] {
                    self.next_line();
                } else {
                    // 0xE2 is a utf8 first byte
                    self.next_column();
                }
            }
            // Advance the column only on the first byte of a character, so a
            // multi-byte character counts as a single column.
            Some(b) if utf8_is_first_byte(b) => self.next_column(),
            _ => {}
        }

        Ok(byte)
    }

    /// Retrieves the next UTF-8 character, updating the line/column position.
    pub(crate) fn next_char(&mut self) -> Result<Option<u32>, Error> {
        let _timer = Profiler::global().start_event("cursor::next_char()", "Lexing");

        let ch = self.iter.next_char()?;

        match ch {
            Some(0xD) => {
                // Try to take a newline if it's next, for windows "\r\n" newlines
                // Otherwise, treat as a Mac OS9 bare '\r' newline
                if self.peek()? == Some(0xA) {
                    let _next = self.iter.next_byte();
                }
                self.next_line();
            }
            // '\n' | '\u{2028}' | '\u{2029}'
            Some(0xA | 0x2028 | 0x2029) => self.next_line(),
            Some(_) => self.next_column(),
            _ => {}
        }

        Ok(ch)
    }
}
|
||||
|
||||
/// Inner iterator for a cursor.
#[derive(Debug)]
#[allow(clippy::option_option)]
struct InnerIter<R> {
    // Underlying byte iterator over the reader.
    iter: Bytes<R>,
    // Number of bytes currently held in `peeked_bytes` (0..=4).
    num_peeked_bytes: u8,
    // Lookahead buffer; only the first `num_peeked_bytes` entries are valid.
    peeked_bytes: [u8; 4],
    // Cached result of `peek_char`. `None` means nothing is cached; the nested
    // Option (hence the clippy allow) lets an end-of-input result be cached too.
    peeked_char: Option<Option<u32>>,
}

impl<R> InnerIter<R> {
    /// Creates a new inner iterator with empty lookahead state.
    const fn new(iter: Bytes<R>) -> Self {
        Self {
            iter,
            num_peeked_bytes: 0,
            peeked_bytes: [0; 4],
            peeked_char: None,
        }
    }
}
|
||||
|
||||
impl<R> InnerIter<R>
where
    R: Read,
{
    /// It will fill the buffer with checked ascii bytes.
    ///
    /// This expects for the buffer to be fully filled. If it's not, it will fail with an
    /// `UnexpectedEof` I/O error.
    fn fill_bytes(&mut self, buf: &mut [u8]) -> io::Result<()> {
        for byte in buf.iter_mut() {
            *byte = self.next_byte()?.ok_or_else(|| {
                io::Error::new(
                    io::ErrorKind::UnexpectedEof,
                    "unexpected EOF when filling buffer",
                )
            })?;
        }
        Ok(())
    }

    /// Increments the iter by n bytes, stopping early at end of input.
    fn increment(&mut self, n: u32) -> Result<(), Error> {
        for _ in 0..n {
            if (self.next_byte()?).is_none() {
                break;
            }
        }
        Ok(())
    }

    /// Peeks the next byte without consuming it.
    pub(super) fn peek_byte(&mut self) -> Result<Option<u8>, Error> {
        if self.num_peeked_bytes > 0 {
            // A byte is already buffered; return it without touching the reader.
            let byte = self.peeked_bytes[0];
            Ok(Some(byte))
        } else {
            match self.iter.next().transpose()? {
                Some(byte) => {
                    // Stash the freshly read byte so a later `next_byte` returns it.
                    self.num_peeked_bytes = 1;
                    self.peeked_bytes[0] = byte;
                    Ok(Some(byte))
                }
                None => Ok(None),
            }
        }
    }

    /// Peeks the next n bytes, the maximum number of peeked bytes is 4 (n <= 4).
    ///
    /// The returned slice may be shorter than `n` if the input ends first.
    pub(super) fn peek_n_bytes(&mut self, n: u8) -> Result<&[u8], Error> {
        while self.num_peeked_bytes < n && self.num_peeked_bytes < 4 {
            match self.iter.next().transpose()? {
                Some(byte) => {
                    self.peeked_bytes[usize::from(self.num_peeked_bytes)] = byte;
                    self.num_peeked_bytes += 1;
                }
                None => break,
            };
        }
        Ok(&self.peeked_bytes[..usize::from(u8::min(n, self.num_peeked_bytes))])
    }

    /// Peeks the next unchecked character in u32 code point.
    ///
    /// "Unchecked" because the decoded value is not validated as a Unicode
    /// scalar value; the result is cached in `peeked_char`.
    pub(super) fn peek_char(&mut self) -> Result<Option<u32>, Error> {
        if let Some(ch) = self.peeked_char {
            Ok(ch)
        } else {
            // Decode UTF-8
            let (x, y, z, w) = match self.peek_n_bytes(4)? {
                // ASCII fast path: a single byte is the whole code point.
                [b, ..] if *b < 128 => {
                    let char = u32::from(*b);
                    self.peeked_char = Some(Some(char));
                    return Ok(Some(char));
                }
                [] => {
                    self.peeked_char = None;
                    return Ok(None);
                }
                bytes => (
                    bytes[0],
                    bytes.get(1).copied(),
                    bytes.get(2).copied(),
                    bytes.get(3).copied(),
                ),
            };

            // Multibyte case follows
            // Decode from a byte combination out of: [[[x y] z] w]
            // NOTE: Performance is sensitive to the exact formulation here
            let init = utf8_first_byte(x, 2);
            let y = y.unwrap_or_default();
            let mut ch = utf8_acc_cont_byte(init, y);
            if x >= 0xE0 {
                // [[x y z] w] case
                // 5th bit in 0xE0 .. 0xEF is always clear, so `init` is still valid
                let z = z.unwrap_or_default();
                let y_z = utf8_acc_cont_byte(u32::from(y & CONT_MASK), z);
                ch = init << 12 | y_z;
                if x >= 0xF0 {
                    // [x y z w] case
                    // use only the lower 3 bits of `init`
                    let w = w.unwrap_or_default();
                    ch = (init & 7) << 18 | utf8_acc_cont_byte(y_z, w);
                }
            };

            self.peeked_char = Some(Some(ch));
            Ok(Some(ch))
        }
    }

    /// Retrieves the next byte, invalidating any cached peeked character.
    fn next_byte(&mut self) -> io::Result<Option<u8>> {
        // Consuming a byte invalidates the cached code point.
        self.peeked_char = None;
        if self.num_peeked_bytes > 0 {
            let byte = self.peeked_bytes[0];
            self.num_peeked_bytes -= 1;
            // Shift the remaining buffered bytes to the front.
            self.peeked_bytes.rotate_left(1);
            Ok(Some(byte))
        } else {
            self.iter.next().transpose()
        }
    }

    /// Retrieves the next unchecked char in u32 code point.
    fn next_char(&mut self) -> io::Result<Option<u32>> {
        if let Some(ch) = self.peeked_char.take() {
            if let Some(c) = ch {
                // Consume the bytes that make up the already-decoded character.
                self.increment(utf8_len(c))?;
            }
            return Ok(ch);
        }

        // Decode UTF-8
        let x = match self.next_byte()? {
            Some(b) if b < 128 => return Ok(Some(u32::from(b))),
            Some(b) => b,
            None => return Ok(None),
        };

        // Multibyte case follows
        // Decode from a byte combination out of: [[[x y] z] w]
        // NOTE: Performance is sensitive to the exact formulation here
        let init = utf8_first_byte(x, 2);
        let y = unwrap_or_0(self.next_byte()?);
        let mut ch = utf8_acc_cont_byte(init, y);
        if x >= 0xE0 {
            // [[x y z] w] case
            // 5th bit in 0xE0 .. 0xEF is always clear, so `init` is still valid
            let z = unwrap_or_0(self.next_byte()?);
            let y_z = utf8_acc_cont_byte(u32::from(y & CONT_MASK), z);
            ch = init << 12 | y_z;
            if x >= 0xF0 {
                // [x y z w] case
                // use only the lower 3 bits of `init`
                let w = unwrap_or_0(self.next_byte()?);
                ch = (init & 7) << 18 | utf8_acc_cont_byte(y_z, w);
            }
        };

        Ok(Some(ch))
    }
}
|
||||
|
||||
/// Mask of the value bits of a continuation byte.
const CONT_MASK: u8 = 0b0011_1111;

/// Returns the initial codepoint accumulator for the first byte.
/// The first byte is special, only want bottom 5 bits for width 2, 4 bits
/// for width 3, and 3 bits for width 4.
fn utf8_first_byte(byte: u8, width: u32) -> u32 {
    u32::from(byte & (0x7F >> width))
}

/// Returns the value of `ch` updated with continuation byte `byte`.
fn utf8_acc_cont_byte(ch: u32, byte: u8) -> u32 {
    (ch << 6) | u32::from(byte & CONT_MASK)
}

/// Checks whether the byte is a UTF-8 first byte (i.e., ascii byte or starts with the
/// bits `11`).
const fn utf8_is_first_byte(byte: u8) -> bool {
    // BUG FIX: the comparison must use the binary literal `0b11` (the two
    // leading bits of a UTF-8 lead byte). The previous `0x11` is decimal 17,
    // which `byte >> 6` (at most 3) can never equal, so every non-ASCII lead
    // byte was misclassified as a continuation byte.
    byte <= 0x7F || (byte >> 6) == 0b11
}

/// Returns the contained byte, or 0 at end of input.
fn unwrap_or_0(opt: Option<u8>) -> u8 {
    opt.unwrap_or(0)
}

/// Returns the number of bytes the code point `ch` occupies in UTF-8.
const fn utf8_len(ch: u32) -> u32 {
    if ch <= 0x7F {
        1
    } else if ch <= 0x7FF {
        2
    } else if ch <= 0xFFFF {
        3
    } else {
        4
    }
}
|
||||
60
javascript-engine/external/boa/boa_parser/src/lexer/error.rs
vendored
Normal file
60
javascript-engine/external/boa/boa_parser/src/lexer/error.rs
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
//! This module contains the errors used by the lexer.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript reference][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-native-error-types-used-in-this-standard
|
||||
|
||||
use boa_ast::Position;
|
||||
use std::{error::Error as StdError, fmt, io};
|
||||
|
||||
/// An error that occurred during the lexing.
#[derive(Debug)]
pub enum Error {
    /// An IO error is raised to indicate an issue when the lexer is reading data that isn't
    /// related to the sourcecode itself.
    IO(io::Error),

    /// Indicates a parsing error due to the presence, or lack of, one or more characters.
    ///
    /// Carries the error message and the source position at which it occurred.
    ///
    /// More information:
    /// - [ECMAScript reference][spec]
    ///
    /// [spec]: https://tc39.es/ecma262/#sec-native-error-types-used-in-this-standard-syntaxerror
    Syntax(Box<str>, Position),
}
|
||||
|
||||
impl From<io::Error> for Error {
    /// Wraps an I/O error in the lexer's [`Error::IO`] variant.
    fn from(err: io::Error) -> Self {
        Self::IO(err)
    }
}
|
||||
|
||||
impl Error {
    /// Creates a new syntax error.
    ///
    /// `err` is the human-readable message and `pos` the source position at
    /// which the offending input was found.
    pub(super) fn syntax<M, P>(err: M, pos: P) -> Self
    where
        M: Into<Box<str>>,
        P: Into<Position>,
    {
        Self::Syntax(err.into(), pos.into())
    }
}
|
||||
|
||||
impl fmt::Display for Error {
    /// Renders the lexer error as a human-readable message.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::IO(e) => write!(f, "I/O error: {e}"),
            Self::Syntax(e, pos) => write!(f, "Syntax Error: {e} at position: {pos}"),
        }
    }
}
|
||||
|
||||
impl StdError for Error {
|
||||
fn source(&self) -> Option<&(dyn StdError + 'static)> {
|
||||
match self {
|
||||
Self::IO(err) => Some(err),
|
||||
Self::Syntax(_, _) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
137
javascript-engine/external/boa/boa_parser/src/lexer/identifier.rs
vendored
Normal file
137
javascript-engine/external/boa/boa_parser/src/lexer/identifier.rs
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
//! This module implements lexing for identifiers (foo, myvar, etc.) used in ECMAScript.
|
||||
|
||||
use crate::lexer::{Cursor, Error, StringLiteral, Token, TokenKind, Tokenizer};
|
||||
use boa_ast::{Keyword, Position, Span};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use boa_unicode::UnicodeProperties;
|
||||
use std::io::Read;
|
||||
|
||||
/// Identifier lexing.
///
/// More information:
/// - [ECMAScript reference][spec]
/// - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#prod-Identifier
/// [mdn]: https://developer.mozilla.org/en-US/docs/Glossary/Identifier
#[derive(Debug, Clone, Copy)]
pub(super) struct Identifier {
    // First, already-consumed character of the identifier.
    init: char,
}
|
||||
|
||||
impl Identifier {
|
||||
/// Creates a new identifier/keyword lexer.
|
||||
pub(super) const fn new(init: char) -> Self {
|
||||
Self { init }
|
||||
}
|
||||
|
||||
/// Checks if a character is `IdentifierStart` as per ECMAScript standards.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript reference][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-names-and-keywords
|
||||
pub(super) fn is_identifier_start(ch: u32) -> bool {
|
||||
matches!(ch, 0x0024 /* $ */ | 0x005F /* _ */)
|
||||
|| char::try_from(ch).map_or(false, char::is_id_start)
|
||||
}
|
||||
|
||||
/// Checks if a character is `IdentifierPart` as per ECMAScript standards.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript reference][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-names-and-keywords
|
||||
fn is_identifier_part(ch: u32) -> bool {
|
||||
matches!(
|
||||
ch,
|
||||
0x0024 /* $ */ | 0x005F /* _ */ | 0x200C /* <ZWNJ> */ | 0x200D /* <ZWJ> */
|
||||
) || char::try_from(ch).map_or(false, char::is_id_continue)
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> Tokenizer<R> for Identifier {
    /// Lexes a full identifier or keyword, starting from the already-consumed
    /// `self.init` character.
    fn lex(
        &mut self,
        cursor: &mut Cursor<R>,
        start_pos: Position,
        interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("Identifier", "Lexing");

        let (identifier_name, contains_escaped_chars) =
            Self::take_identifier_name(cursor, start_pos, self.init)?;

        // Reserved words become their dedicated token kinds; anything else is
        // interned as a plain identifier.
        let token_kind = match identifier_name.parse() {
            Ok(Keyword::True) => TokenKind::BooleanLiteral(true),
            Ok(Keyword::False) => TokenKind::BooleanLiteral(false),
            Ok(Keyword::Null) => TokenKind::NullLiteral,
            Ok(keyword) => TokenKind::Keyword((keyword, contains_escaped_chars)),
            _ => TokenKind::identifier(interner.get_or_intern(identifier_name.as_str())),
        };

        Ok(Token::new(token_kind, Span::new(start_pos, cursor.pos())))
    }
}
|
||||
|
||||
impl Identifier {
    /// Consumes an identifier name from the cursor, starting with the
    /// already-consumed character `init`.
    ///
    /// Returns the accumulated name and a flag indicating whether it contained
    /// any `\u` escape sequences. Errors if an escape decodes to a character
    /// that is not a valid identifier start/part.
    pub(super) fn take_identifier_name<R>(
        cursor: &mut Cursor<R>,
        start_pos: Position,
        init: char,
    ) -> Result<(String, bool), Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("Identifier::take_identifier_name", "Lexing");

        let mut contains_escaped_chars = false;
        // If the identifier begins with `\u`, decode the escape and validate it
        // as an identifier start.
        let mut identifier_name = if init == '\\' && cursor.next_is(b'u')? {
            let ch = StringLiteral::take_unicode_escape_sequence(cursor, start_pos)?;

            if Self::is_identifier_start(ch) {
                contains_escaped_chars = true;
                String::from(
                    char::try_from(ch)
                        .expect("all identifier starts must be convertible to strings"),
                )
            } else {
                return Err(Error::Syntax("invalid identifier start".into(), start_pos));
            }
        } else {
            // The caller guarantees that `init` is a valid identifier start
            String::from(init)
        };

        loop {
            let ch = match cursor.peek_char()? {
                // A `\u` escape inside the identifier: skip the two escape
                // bytes, then decode and validate the escaped code point.
                Some(0x005C /* \ */) if cursor.peek_n(2)?.get(1) == Some(&0x75) /* u */ => {
                    let pos = cursor.pos();
                    let _next = cursor.next_byte();
                    let _next = cursor.next_byte();
                    let ch = StringLiteral::take_unicode_escape_sequence(cursor, pos)?;

                    if Self::is_identifier_part(ch) {
                        contains_escaped_chars = true;
                        ch
                    } else {
                        return Err(Error::Syntax("invalid identifier part".into(), pos));
                    }
                }
                // An ordinary identifier-part character: consume it.
                Some(ch) if Self::is_identifier_part(ch) => {
                    let _ = cursor.next_char()?;
                    ch
                },
                // Anything else ends the identifier.
                _ => break,
            };

            identifier_name.push(char::try_from(ch).expect("checked character value"));
        }

        Ok((identifier_name, contains_escaped_chars))
    }
}
|
||||
341
javascript-engine/external/boa/boa_parser/src/lexer/mod.rs
vendored
Normal file
341
javascript-engine/external/boa/boa_parser/src/lexer/mod.rs
vendored
Normal file
@@ -0,0 +1,341 @@
|
||||
//! Boa's lexical analyzer(Lexer) for ECMAScript source code.
|
||||
//!
|
||||
//! The Lexer splits its input source code into a sequence of input elements called tokens,
|
||||
//! represented by the [Token] structure. It also removes
|
||||
//! whitespace and comments and attaches them to the next token.
|
||||
//!
|
||||
//! This is tightly coupled with the parser due to the javascript goal-symbol requirements
|
||||
//! as documented by the spec.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript reference][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-ecmascript-language-lexical-grammar
|
||||
|
||||
pub mod error;
|
||||
pub mod regex;
|
||||
pub mod token;
|
||||
|
||||
mod comment;
|
||||
mod cursor;
|
||||
mod identifier;
|
||||
mod number;
|
||||
mod operator;
|
||||
mod private_identifier;
|
||||
mod spread;
|
||||
mod string;
|
||||
mod template;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use self::{
|
||||
comment::{HashbangComment, MultiLineComment, SingleLineComment},
|
||||
cursor::Cursor,
|
||||
identifier::Identifier,
|
||||
number::NumberLiteral,
|
||||
operator::Operator,
|
||||
private_identifier::PrivateIdentifier,
|
||||
regex::RegexLiteral,
|
||||
spread::SpreadLiteral,
|
||||
string::StringLiteral,
|
||||
template::TemplateLiteral,
|
||||
};
|
||||
use boa_ast::{Position, Punctuator, Span};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
pub use self::{
|
||||
error::Error,
|
||||
token::{Token, TokenKind},
|
||||
};
|
||||
|
||||
/// Common interface implemented by every sub-lexer (strings, numbers,
/// comments, operators, …) that [`Lexer`] dispatches to.
trait Tokenizer<R> {
    /// Lexes the next token.
    fn lex(
        &mut self,
        cursor: &mut Cursor<R>,
        start_pos: Position,
        interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read;
}
|
||||
|
||||
/// Lexer or tokenizer for the Boa JavaScript Engine.
#[derive(Debug)]
pub struct Lexer<R> {
    /// Cursor over the raw input; also tracks the current source position.
    cursor: Cursor<R>,
    /// Current goal symbol, which decides how `/` is lexed
    /// (division vs. regular expression).
    goal_symbol: InputElement,
}
|
||||
|
||||
impl<R> Lexer<R> {
    /// Checks if a character is whitespace as per ECMAScript standards.
    ///
    /// The Rust `char::is_whitespace` function and the ECMAScript standard use different sets of
    /// characters as whitespaces:
    /// * Rust uses `\p{White_Space}`,
    /// * ECMAScript standard uses `\{Space_Separator}` + `\u{0009}`, `\u{000B}`, `\u{000C}`, `\u{FEFF}`
    ///
    /// [More information](https://tc39.es/ecma262/#table-32)
    const fn is_whitespace(ch: u32) -> bool {
        matches!(
            ch,
            0x0020 | 0x0009 | 0x000B | 0x000C | 0x00A0 | 0xFEFF |
            // Unicode Space_Separator category (minus \u{0020} and \u{00A0} which are already stated above)
            0x1680 | 0x2000..=0x200A | 0x202F | 0x205F | 0x3000
        )
    }

    /// Sets the goal symbol for the lexer.
    pub(crate) fn set_goal(&mut self, elm: InputElement) {
        self.goal_symbol = elm;
    }

    /// Gets the goal symbol the lexer is currently using.
    pub(crate) const fn get_goal(&self) -> InputElement {
        self.goal_symbol
    }

    /// Returns if strict mode is currently active.
    pub(super) const fn strict_mode(&self) -> bool {
        self.cursor.strict_mode()
    }

    /// Sets the current strict mode.
    pub(super) fn set_strict_mode(&mut self, strict_mode: bool) {
        self.cursor.set_strict_mode(strict_mode);
    }

    /// Creates a new lexer.
    pub fn new(reader: R) -> Self
    where
        R: Read,
    {
        Self {
            cursor: Cursor::new(reader),
            goal_symbol: InputElement::default(),
        }
    }

    // Handles lexing of a token starting '/' with the '/' already being consumed.
    // This could be a divide symbol or the start of a regex.
    //
    // A '/' symbol can always be a comment but if as tested above it is not then
    // that means it could be multiple different tokens depending on the input token.
    //
    // As per https://tc39.es/ecma262/#sec-ecmascript-language-lexical-grammar
    pub(crate) fn lex_slash_token(
        &mut self,
        start: Position,
        interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("lex_slash_token", "Lexing");

        if let Some(c) = self.cursor.peek()? {
            match c {
                b'/' => {
                    self.cursor.next_byte()?.expect("/ token vanished"); // Consume the '/'
                    SingleLineComment.lex(&mut self.cursor, start, interner)
                }
                b'*' => {
                    self.cursor.next_byte()?.expect("* token vanished"); // Consume the '*'
                    MultiLineComment.lex(&mut self.cursor, start, interner)
                }
                ch => {
                    // Not a comment: the goal symbol decides whether `/` is a
                    // division punctuator or the start of a regex literal.
                    match self.get_goal() {
                        InputElement::Div | InputElement::TemplateTail => {
                            // Only div punctuator allowed, regex not.

                            if ch == b'=' {
                                // Indicates this is an AssignDiv.
                                self.cursor.next_byte()?.expect("= token vanished"); // Consume the '='
                                Ok(Token::new(
                                    Punctuator::AssignDiv.into(),
                                    Span::new(start, self.cursor.pos()),
                                ))
                            } else {
                                Ok(Token::new(
                                    Punctuator::Div.into(),
                                    Span::new(start, self.cursor.pos()),
                                ))
                            }
                        }
                        InputElement::RegExp => {
                            // Can be a regular expression.
                            RegexLiteral.lex(&mut self.cursor, start, interner)
                        }
                    }
                }
            }
        } else {
            Err(Error::syntax(
                "Abrupt end: Expecting Token /,*,= or regex",
                start,
            ))
        }
    }

    /// Retrieves the next token from the lexer.
    ///
    /// # Errors
    ///
    /// Will return `Err` on invalid tokens and invalid reads of the bytes being lexed.
    // We intentionally don't implement Iterator trait as Result<Option> is cleaner to handle.
    #[allow(clippy::should_implement_trait)]
    pub fn next(&mut self, interner: &mut Interner) -> Result<Option<Token>, Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("next()", "Lexing");

        // Skip whitespace; `Ok(None)` signals end of input.
        let (start, next_ch) = loop {
            let start = self.cursor.pos();
            if let Some(next_ch) = self.cursor.next_char()? {
                // Ignore whitespace
                if !Self::is_whitespace(next_ch) {
                    break (start, next_ch);
                }
            } else {
                return Ok(None);
            }
        };

        //handle hashbang here so the below match block still throws error on
        //# if position isn't (1, 1)
        if start.column_number() == 1 && start.line_number() == 1 && next_ch == 0x23 {
            if let Some(hashbang_peek) = self.cursor.peek()? {
                if hashbang_peek == 0x21 {
                    // NOTE(review): the Result of lexing the hashbang comment
                    // is discarded here, so a lexing error inside the
                    // hashbang would be silently swallowed — confirm this is
                    // intentional.
                    let _token = HashbangComment.lex(&mut self.cursor, start, interner);
                    return self.next(interner);
                }
            }
        };

        if let Ok(c) = char::try_from(next_ch) {
            // Dispatch to the appropriate sub-lexer based on the first
            // non-whitespace character.
            let token = match c {
                '\r' | '\n' | '\u{2028}' | '\u{2029}' => Ok(Token::new(
                    TokenKind::LineTerminator,
                    Span::new(start, self.cursor.pos()),
                )),
                '"' | '\'' => StringLiteral::new(c).lex(&mut self.cursor, start, interner),
                '`' => TemplateLiteral.lex(&mut self.cursor, start, interner),
                ';' => Ok(Token::new(
                    Punctuator::Semicolon.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                ':' => Ok(Token::new(
                    Punctuator::Colon.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                '.' => {
                    // `.5` is a numeric literal; otherwise `.` starts `.` or `...`.
                    if self.cursor.peek()?.as_ref().map(u8::is_ascii_digit) == Some(true) {
                        NumberLiteral::new(b'.').lex(&mut self.cursor, start, interner)
                    } else {
                        SpreadLiteral::new().lex(&mut self.cursor, start, interner)
                    }
                }
                '(' => Ok(Token::new(
                    Punctuator::OpenParen.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                ')' => Ok(Token::new(
                    Punctuator::CloseParen.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                ',' => Ok(Token::new(
                    Punctuator::Comma.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                '{' => Ok(Token::new(
                    Punctuator::OpenBlock.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                '}' => Ok(Token::new(
                    Punctuator::CloseBlock.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                '[' => Ok(Token::new(
                    Punctuator::OpenBracket.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                ']' => Ok(Token::new(
                    Punctuator::CloseBracket.into(),
                    Span::new(start, self.cursor.pos()),
                )),
                '#' => PrivateIdentifier::new().lex(&mut self.cursor, start, interner),
                '/' => self.lex_slash_token(start, interner),
                #[allow(clippy::cast_possible_truncation)]
                '=' | '*' | '+' | '-' | '%' | '|' | '&' | '^' | '<' | '>' | '!' | '~' | '?' => {
                    Operator::new(next_ch as u8).lex(&mut self.cursor, start, interner)
                }
                '\\' if self.cursor.peek()? == Some(b'u') => {
                    Identifier::new(c).lex(&mut self.cursor, start, interner)
                }
                _ if Identifier::is_identifier_start(c as u32) => {
                    Identifier::new(c).lex(&mut self.cursor, start, interner)
                }
                #[allow(clippy::cast_possible_truncation)]
                _ if c.is_ascii_digit() => {
                    NumberLiteral::new(next_ch as u8).lex(&mut self.cursor, start, interner)
                }
                _ => {
                    let details = format!(
                        "unexpected '{c}' at line {}, column {}",
                        start.line_number(),
                        start.column_number()
                    );
                    Err(Error::syntax(details, start))
                }
            }?;

            if token.kind() == &TokenKind::Comment {
                // Skip comment
                self.next(interner)
            } else {
                Ok(Some(token))
            }
        } else {
            Err(Error::syntax(
                format!(
                    "unexpected utf-8 char '\\u{next_ch}' at line {}, column {}",
                    start.line_number(),
                    start.column_number()
                ),
                start,
            ))
        }
    }

    /// Performs the lexing of a template literal.
    pub(crate) fn lex_template(
        &mut self,
        start: Position,
        interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read,
    {
        TemplateLiteral.lex(&mut self.cursor, start, interner)
    }
}
|
||||
|
||||
/// ECMAScript goal symbols.
///
/// <https://tc39.es/ecma262/#sec-ecmascript-language-lexical-grammar>
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum InputElement {
    /// A `/` lexes as a division (or `/=`) punctuator.
    Div,
    /// A `/` may start a regular expression literal.
    RegExp,
    /// Used while lexing a template literal tail; `/` lexes as division,
    /// like [`InputElement::Div`].
    TemplateTail,
}
|
||||
|
||||
impl Default for InputElement {
    fn default() -> Self {
        // The lexer starts in the `RegExp` goal (see `Lexer::new`), so a
        // leading `/` in the input can begin a regular expression literal.
        Self::RegExp
    }
}
|
||||
415
javascript-engine/external/boa/boa_parser/src/lexer/number.rs
vendored
Normal file
415
javascript-engine/external/boa/boa_parser/src/lexer/number.rs
vendored
Normal file
@@ -0,0 +1,415 @@
|
||||
//! This module implements lexing for number literals (123, 787) used in ECMAScript.
|
||||
|
||||
use crate::lexer::{token::Numeric, Cursor, Error, Token, TokenKind, Tokenizer};
|
||||
use boa_ast::{Position, Span};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use num_bigint::BigInt;
|
||||
use num_traits::{ToPrimitive, Zero};
|
||||
use std::{io::Read, str};
|
||||
|
||||
/// Number literal lexing.
///
/// Assumes the digit is consumed by the cursor (stored in init).
///
/// More information:
/// - [ECMAScript reference][spec]
/// - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#sec-literals-numeric-literals
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type
#[derive(Debug, Clone, Copy)]
pub(super) struct NumberLiteral {
    /// First byte of the literal, already consumed by the caller.
    init: u8,
}

impl NumberLiteral {
    /// Creates a new number literal lexer.
    pub(super) const fn new(init: u8) -> Self {
        Self { init }
    }
}
|
||||
|
||||
/// Helper that classifies a numeric literal while it is being lexed.
///
/// It records whether the literal is a rational (floating point) number, a
/// plain integer, or a `BigInt`, together with the radix for the integer
/// kinds.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum NumericKind {
    Rational,
    Integer(u32),
    BigInt(u32),
}

impl NumericKind {
    /// Get the base of the number kind.
    const fn base(self) -> u32 {
        // Integer kinds carry their radix explicitly; rational literals are
        // always written in decimal.
        match self {
            Self::Integer(base) | Self::BigInt(base) => base,
            Self::Rational => 10,
        }
    }

    /// Converts `self` to `BigInt` kind.
    fn to_bigint(self) -> Self {
        match self {
            Self::Integer(base) | Self::BigInt(base) => Self::BigInt(base),
            Self::Rational => unreachable!("can not convert rational number to BigInt"),
        }
    }
}
|
||||
|
||||
/// Consumes the `SignedInteger` part of an exponent into `buf`.
///
/// This is an optional `+` or `-` sign followed by one or more digits in the
/// base of `kind`.
///
/// # Errors
///
/// Returns a syntax error when no digit follows the sign, when the first
/// byte is not a valid digit, or when the input ends abruptly.
fn take_signed_integer<R>(
    buf: &mut Vec<u8>,
    cursor: &mut Cursor<R>,
    kind: NumericKind,
) -> Result<(), Error>
where
    R: Read,
{
    // The next part must be SignedInteger.
    // This is optionally a '+' or '-' followed by 1 or more DecimalDigits.
    match cursor.next_byte()? {
        Some(b'+') => {
            buf.push(b'+');
            if !cursor.next_is_ascii_pred(&|ch| ch.is_digit(kind.base()))? {
                // A digit must follow the + or - symbol.
                return Err(Error::syntax("No digit found after + symbol", cursor.pos()));
            }
        }
        Some(b'-') => {
            buf.push(b'-');
            if !cursor.next_is_ascii_pred(&|ch| ch.is_digit(kind.base()))? {
                // A digit must follow the + or - symbol.
                return Err(Error::syntax("No digit found after - symbol", cursor.pos()));
            }
        }
        Some(byte) => {
            // No sign: the first byte must itself be a digit in this base.
            let ch = char::from(byte);
            if ch.is_ascii() && ch.is_digit(kind.base()) {
                buf.push(byte);
            } else {
                return Err(Error::syntax(
                    "When lexing exponential value found unexpected char",
                    cursor.pos(),
                ));
            }
        }
        None => {
            return Err(Error::syntax(
                "Abrupt end: No exponential value found",
                cursor.pos(),
            ));
        }
    }

    // Consume the decimal digits.
    take_integer(buf, cursor, kind, true)?;

    Ok(())
}
|
||||
|
||||
/// Consumes digits (in the base of `kind`) and `_` numeric separators into
/// `buf`, stopping at the first byte that is neither.
///
/// # Errors
///
/// Returns a syntax error on a doubled separator, on any separator when
/// `separator_allowed` is false, or on a trailing separator.
fn take_integer<R>(
    buf: &mut Vec<u8>,
    cursor: &mut Cursor<R>,
    kind: NumericKind,
    separator_allowed: bool,
) -> Result<(), Error>
where
    R: Read,
{
    // Tracks whether the previously consumed byte was `_`, to reject `__`
    // and a literal ending in `_`.
    let mut prev_is_underscore = false;
    let mut pos = cursor.pos();
    while cursor.next_is_ascii_pred(&|c| c.is_digit(kind.base()) || c == '_')? {
        pos = cursor.pos();
        match cursor.next_byte()? {
            Some(c) if char::from(c).is_digit(kind.base()) => {
                prev_is_underscore = false;
                buf.push(c);
            }
            // Separators are consumed but never pushed into `buf`.
            Some(b'_') if separator_allowed => {
                if prev_is_underscore {
                    return Err(Error::syntax(
                        "only one underscore is allowed as numeric separator",
                        cursor.pos(),
                    ));
                }
                prev_is_underscore = true;
            }
            Some(b'_') if !separator_allowed => {
                return Err(Error::syntax("separator is not allowed", pos));
            }
            _ => (),
        }
    }
    if prev_is_underscore {
        return Err(Error::syntax(
            "underscores are not allowed at the end of numeric literals",
            pos,
        ));
    }
    Ok(())
}
|
||||
|
||||
/// Utility function for checking the `NumericLiteral` is not followed by an `IdentifierStart` or `DecimalDigit` character.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript Specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-literals-numeric-literals
|
||||
fn check_after_numeric_literal<R>(cursor: &mut Cursor<R>) -> Result<(), Error>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
if cursor.next_is_ascii_pred(&|ch| ch.is_ascii_alphanumeric() || ch == '$' || ch == '_')? {
|
||||
Err(Error::syntax(
|
||||
"a numeric literal must not be followed by an alphanumeric, $ or _ characters",
|
||||
cursor.pos(),
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> Tokenizer<R> for NumberLiteral {
    /// Lexes a complete numeric literal (integer, rational or `BigInt` in
    /// any supported base) starting from the already-consumed `self.init`
    /// byte.
    fn lex(
        &mut self,
        cursor: &mut Cursor<R>,
        start_pos: Position,
        _interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("NumberLiteral", "Lexing");

        let mut buf = vec![self.init];

        // Default assume the number is a base 10 integer.
        let mut kind = NumericKind::Integer(10);

        let c = cursor.peek();
        let mut legacy_octal = false;

        // A leading '0' may introduce a base prefix (0x, 0o, 0b), a `0n`
        // BigInt, a legacy octal literal, or just the literal `0`.
        if self.init == b'0' {
            if let Some(ch) = c? {
                match ch {
                    b'x' | b'X' => {
                        // Remove the initial '0' from buffer.
                        cursor.next_char()?.expect("x or X character vanished");
                        buf.pop();

                        // HexIntegerLiteral
                        kind = NumericKind::Integer(16);

                        // Checks if the next char after '0x' is a digit of that base. if not return an error.
                        if !cursor.next_is_ascii_pred(&|ch| ch.is_ascii_hexdigit())? {
                            return Err(Error::syntax(
                                "expected hexadecimal digit after number base prefix",
                                cursor.pos(),
                            ));
                        }
                    }
                    b'o' | b'O' => {
                        // Remove the initial '0' from buffer.
                        cursor.next_char()?.expect("o or O character vanished");
                        buf.pop();

                        // OctalIntegerLiteral
                        kind = NumericKind::Integer(8);

                        // Checks if the next char after '0o' is a digit of that base. if not return an error.
                        if !cursor.next_is_ascii_pred(&|ch| ch.is_digit(8))? {
                            return Err(Error::syntax(
                                "expected octal digit after number base prefix",
                                cursor.pos(),
                            ));
                        }
                    }
                    b'b' | b'B' => {
                        // Remove the initial '0' from buffer.
                        cursor.next_char()?.expect("b or B character vanished");
                        buf.pop();

                        // BinaryIntegerLiteral
                        kind = NumericKind::Integer(2);

                        // Checks if the next char after '0b' is a digit of that base. if not return an error.
                        if !cursor.next_is_ascii_pred(&|ch| ch.is_digit(2))? {
                            return Err(Error::syntax(
                                "expected binary digit after number base prefix",
                                cursor.pos(),
                            ));
                        }
                    }
                    b'n' => {
                        cursor.next_char()?.expect("n character vanished");

                        // DecimalBigIntegerLiteral '0n'
                        return Ok(Token::new(
                            TokenKind::NumericLiteral(Numeric::BigInt(BigInt::zero().into())),
                            Span::new(start_pos, cursor.pos()),
                        ));
                    }
                    byte => {
                        legacy_octal = true;
                        let ch = char::from(byte);
                        if ch.is_digit(8) {
                            // LegacyOctalIntegerLiteral, or a number with leading 0s.
                            if cursor.strict_mode() {
                                // LegacyOctalIntegerLiteral is forbidden with strict mode true.
                                return Err(Error::syntax(
                                    "implicit octal literals are not allowed in strict mode",
                                    start_pos,
                                ));
                            }

                            // Remove the initial '0' from buffer.
                            buf.pop();

                            buf.push(cursor.next_byte()?.expect("'0' character vanished"));

                            // Separators are not allowed in legacy octal literals.
                            take_integer(&mut buf, cursor, NumericKind::Integer(8), false)?;

                            if !cursor.next_is_ascii_pred(&|c| c.is_ascii_digit() || c == '_')? {
                                // LegacyOctalIntegerLiteral
                                kind = NumericKind::Integer(8);
                            }
                        } else if ch.is_ascii_digit() {
                            // Indicates a numerical digit comes after the 0 but it isn't an octal digit
                            // so therefore this must be a number with an unneeded leading 0. This is
                            // forbidden in strict mode.
                            if cursor.strict_mode() {
                                return Err(Error::syntax(
                                    "leading 0's are not allowed in strict mode",
                                    start_pos,
                                ));
                            }
                        } // Else indicates that the symbol is a non-number.
                    }
                }
            } else {
                // DecimalLiteral lexing.
                // Indicates that the number is just a single 0.
                return Ok(Token::new(
                    TokenKind::NumericLiteral(Numeric::Integer(0)),
                    Span::new(start_pos, cursor.pos()),
                ));
            }
        }

        let next = if self.init == b'.' {
            Some(b'.')
        } else {
            // Consume digits and separators until a non-digit non-separator
            // character is encountered or all the characters are consumed.
            take_integer(&mut buf, cursor, kind, !legacy_octal)?;
            cursor.peek()?
        };

        // The non-digit character could be:
        // 'n' To indicate a BigIntLiteralSuffix.
        // '.' To indicate a decimal separator.
        // 'e' | 'E' To indicate an ExponentPart.
        match next {
            Some(b'n') => {
                // DecimalBigIntegerLiteral
                // Lexing finished.
                // Consume the n
                if legacy_octal {
                    return Err(Error::syntax(
                        "'n' suffix not allowed in octal representation",
                        cursor.pos(),
                    ));
                }
                cursor.next_byte()?.expect("n character vanished");

                kind = kind.to_bigint();
            }
            Some(b'.') => {
                if kind.base() == 10 {
                    // Only base 10 numbers can have a decimal separator.
                    // Number literal lexing finished if a . is found for a number in a different base.
                    if self.init != b'.' {
                        cursor.next_byte()?.expect("'.' token vanished");
                        buf.push(b'.'); // Consume the .
                    }
                    kind = NumericKind::Rational;

                    if cursor.peek()? == Some(b'_') {
                        return Err(Error::syntax(
                            "numeric separator not allowed after '.'",
                            cursor.pos(),
                        ));
                    }

                    // Consume digits and separators until a non-digit non-separator
                    // character is encountered or all the characters are consumed.
                    take_integer(&mut buf, cursor, kind, true)?;

                    // The non-digit character at this point must be an 'e' or 'E' to indicate an Exponent Part.
                    // Another '.' or 'n' is not allowed.
                    match cursor.peek()? {
                        Some(b'e' | b'E') => {
                            // Consume the ExponentIndicator.
                            cursor.next_byte()?.expect("e or E token vanished");

                            buf.push(b'E');

                            take_signed_integer(&mut buf, cursor, kind)?;
                        }
                        Some(_) | None => {
                            // Finished lexing.
                        }
                    }
                }
            }
            Some(b'e' | b'E') => {
                kind = NumericKind::Rational;
                cursor.next_byte()?.expect("e or E character vanished"); // Consume the ExponentIndicator.
                buf.push(b'E');
                take_signed_integer(&mut buf, cursor, kind)?;
            }
            Some(_) | None => {
                // Indicates lexing finished.
            }
        }

        check_after_numeric_literal(cursor)?;

        // SAFETY: `buf` only ever receives ASCII bytes (digits validated by
        // `is_digit`/`is_ascii_hexdigit`, plus '+', '-', '.', 'E'), so it is
        // always valid UTF-8.
        let num_str = unsafe { str::from_utf8_unchecked(buf.as_slice()) };
        let num = match kind {
            NumericKind::BigInt(base) => {
                Numeric::BigInt(
                    BigInt::parse_bytes(num_str.as_bytes(), base).expect("Could not convert to BigInt").into()
                )
            }
            // casting precisely to check if the float doesn't lose info on truncation
            #[allow(clippy::cast_possible_truncation)]
            NumericKind::Rational /* base: 10 */ => {
                let val: f64 = fast_float::parse(num_str).expect("Failed to parse float after checks");
                let int_val = val as i32;

                // The truncated float should be identical to the non-truncated float for the conversion to be loss-less,
                // any difference and the number must be stored as a rational.
                #[allow(clippy::float_cmp)]
                if f64::from(int_val) == val {
                    // For performance reasons we attempt to store values as integers if possible.
                    Numeric::Integer(int_val)
                } else {
                    Numeric::Rational(val)
                }
            },
            NumericKind::Integer(base) => {
                // Integers that overflow `i32` fall back to a rational
                // (f64) representation, saturating to infinity.
                i32::from_str_radix(num_str, base).map_or_else(|_| {
                    let num = BigInt::parse_bytes(num_str.as_bytes(), base).expect("Failed to parse integer after checks");
                    Numeric::Rational(num.to_f64().unwrap_or(f64::INFINITY))
                }, Numeric::Integer)
            }
        };

        Ok(Token::new(
            TokenKind::NumericLiteral(num),
            Span::new(start_pos, cursor.pos()),
        ))
    }
}
|
||||
185
javascript-engine/external/boa/boa_parser/src/lexer/operator.rs
vendored
Normal file
185
javascript-engine/external/boa/boa_parser/src/lexer/operator.rs
vendored
Normal file
@@ -0,0 +1,185 @@
|
||||
//! Boa's lexing for ECMAScript operators (+, - etc.).
|
||||
|
||||
use crate::lexer::{Cursor, Error, Token, TokenKind, Tokenizer};
|
||||
use boa_ast::{Position, Punctuator, Span};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// `vop` tests the next token to see if we're on an assign operation or just a plain binary operation.
///
/// If the next value is not an assignment operation it will pattern match the provided values and return the corresponding token.
macro_rules! vop {
    // Two-expression form: `=` selects `$assign_op`, anything else selects `$op`.
    ($cursor:ident, $assign_op:expr, $op:expr) => ({
        match $cursor.peek()? {
            None => Err(Error::syntax("abrupt end - could not preview next value as part of the operator", $cursor.pos())),
            Some(b'=') => {
                $cursor.next_byte()?.expect("= token vanished");
                $cursor.next_column();
                $assign_op
            }
            Some(_) => $op,
        }
    });
    // Extended form: additional byte patterns (each consuming the byte) are
    // checked before falling back to `$op`.
    ($cursor:ident, $assign_op:expr, $op:expr, {$($case:pat => $block:expr), +}) => ({
        match $cursor.peek()? {
            None => Err(Error::syntax("abrupt end - could not preview next value as part of the operator", $cursor.pos())),
            Some(b'=') => {
                $cursor.next_byte()?.expect("= token vanished");
                $cursor.next_column();
                $assign_op
            },
            $($case => {
                $cursor.next_byte()?.expect("Token vanished");
                $cursor.next_column();
                $block
            })+,
            _ => $op,
        }
    });
}
|
||||
|
||||
/// The `op` macro handles binary operations or assignment operations and converts them into tokens.
///
/// It wraps the `Punctuator` chosen by [`vop!`] into a [`Token`] spanning
/// from `$start_pos` to the cursor's current position.
macro_rules! op {
    ($cursor:ident, $start_pos:expr, $assign_op:expr, $op:expr) => ({
        Ok(Token::new(
            vop!($cursor, $assign_op, $op)?.into(),
            Span::new($start_pos, $cursor.pos()),
        ))
    });
    ($cursor:ident, $start_pos:expr, $assign_op:expr, $op:expr, {$($case:pat => $block:expr),+}) => ({
        let punc: Punctuator = vop!($cursor, $assign_op, $op, {$($case => $block),+})?;
        Ok(Token::new(
            punc.into(),
            Span::new($start_pos, $cursor.pos()),
        ))
    });
}
|
||||
|
||||
/// Operator lexing.
///
/// Assumes that the cursor has already consumed the operator starting symbol (stored in init).
///
/// More information:
/// - [ECMAScript reference][spec]
/// - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#sec-ecmascript-language-expressions
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators
#[derive(Debug, Clone, Copy)]
pub(super) struct Operator {
    /// First byte of the operator, already consumed by the caller.
    init: u8,
}

impl Operator {
    /// Creates a new operator lexer.
    pub(super) const fn new(init: u8) -> Self {
        Self { init }
    }
}
|
||||
|
||||
impl<R> Tokenizer<R> for Operator {
    /// Lexes the rest of an operator starting from the already-consumed
    /// `self.init` byte, disambiguating multi-byte operators (`**=`, `?.`,
    /// `>>>=`, …) via the `op!`/`vop!` macros.
    fn lex(
        &mut self,
        cursor: &mut Cursor<R>,
        start_pos: Position,
        _interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("Operator", "Lexing");

        match self.init {
            b'*' => op!(cursor, start_pos, Ok(Punctuator::AssignMul), Ok(Punctuator::Mul), {
                Some(b'*') => vop!(cursor, Ok(Punctuator::AssignPow), Ok(Punctuator::Exp))
            }),
            b'+' => op!(cursor, start_pos, Ok(Punctuator::AssignAdd), Ok(Punctuator::Add), {
                Some(b'+') => Ok(Punctuator::Inc)
            }),
            b'-' => op!(cursor, start_pos, Ok(Punctuator::AssignSub), Ok(Punctuator::Sub), {
                Some(b'-') => {
                    Ok(Punctuator::Dec)
                }
            }),
            b'%' => op!(
                cursor,
                start_pos,
                Ok(Punctuator::AssignMod),
                Ok(Punctuator::Mod)
            ),
            b'|' => op!(cursor, start_pos, Ok(Punctuator::AssignOr), Ok(Punctuator::Or), {
                Some(b'|') => vop!(cursor, Ok(Punctuator::AssignBoolOr), Ok(Punctuator::BoolOr))
            }),
            b'&' => op!(cursor, start_pos, Ok(Punctuator::AssignAnd), Ok(Punctuator::And), {
                Some(b'&') => vop!(cursor, Ok(Punctuator::AssignBoolAnd), Ok(Punctuator::BoolAnd))
            }),
            b'?' => {
                // `?` needs two bytes of lookahead to distinguish `??`,
                // `??=`, `?.` and a plain `?`. Note that `?.` followed by a
                // digit (e.g. `x?.5:y`) is a conditional, not optional
                // chaining.
                let (first, second) = (
                    cursor.peek_n(2)?.first().copied(),
                    cursor.peek_n(2)?.get(1).copied(),
                );
                match first {
                    Some(b'?') => {
                        let _ = cursor.next_byte()?.expect("? vanished");
                        op!(
                            cursor,
                            start_pos,
                            Ok(Punctuator::AssignCoalesce),
                            Ok(Punctuator::Coalesce)
                        )
                    }
                    Some(b'.') if !matches!(second, Some(second) if second.is_ascii_digit()) => {
                        let _ = cursor.next_byte()?.expect(". vanished");
                        Ok(Token::new(
                            TokenKind::Punctuator(Punctuator::Optional),
                            Span::new(start_pos, cursor.pos()),
                        ))
                    }
                    _ => Ok(Token::new(
                        TokenKind::Punctuator(Punctuator::Question),
                        Span::new(start_pos, cursor.pos()),
                    )),
                }
            }
            b'^' => op!(
                cursor,
                start_pos,
                Ok(Punctuator::AssignXor),
                Ok(Punctuator::Xor)
            ),
            // `=` branches on a second `=` to pick `==`/`===`, and on `>` for `=>`.
            b'=' => op!(cursor, start_pos, if cursor.next_is(b'=')? {
                Ok(Punctuator::StrictEq)
            } else {
                Ok(Punctuator::Eq)
            }, Ok(Punctuator::Assign), {
                Some(b'>') => {
                    Ok(Punctuator::Arrow)
                }
            }),
            b'<' => {
                op!(cursor, start_pos, Ok(Punctuator::LessThanOrEq), Ok(Punctuator::LessThan), {
                    Some(b'<') => vop!(cursor, Ok(Punctuator::AssignLeftSh), Ok(Punctuator::LeftSh))
                })
            }
            b'>' => {
                op!(cursor, start_pos, Ok(Punctuator::GreaterThanOrEq), Ok(Punctuator::GreaterThan), {
                    Some(b'>') => vop!(cursor, Ok(Punctuator::AssignRightSh), Ok(Punctuator::RightSh), {
                        Some(b'>') => vop!(cursor, Ok(Punctuator::AssignURightSh), Ok(Punctuator::URightSh))
                    })
                })
            }
            b'!' => op!(
                cursor,
                start_pos,
                vop!(cursor, Ok(Punctuator::StrictNotEq), Ok(Punctuator::NotEq)),
                Ok(Punctuator::Not)
            ),
            b'~' => Ok(Token::new(
                Punctuator::Neg.into(),
                Span::new(start_pos, cursor.pos()),
            )),
            // The caller (`Lexer::next`) only dispatches the bytes handled
            // above, so any other byte is a programming error.
            op => unimplemented!("operator {}", op),
        }
    }
}
|
||||
76
javascript-engine/external/boa/boa_parser/src/lexer/private_identifier.rs
vendored
Normal file
76
javascript-engine/external/boa/boa_parser/src/lexer/private_identifier.rs
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
//! Boa's lexing for ECMAScript private identifiers (#foo, #myvar, etc.).
|
||||
|
||||
use crate::lexer::{identifier::Identifier, Cursor, Error, Token, TokenKind, Tokenizer};
|
||||
use boa_ast::{Position, Span};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Private Identifier lexing.
///
/// More information:
/// - [ECMAScript reference][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-PrivateIdentifier
// Unit struct: the lexer keeps no state of its own; `lex` reads everything it
// needs from the cursor. NOTE(review): the leading `#` is presumably consumed
// by the caller before `lex` runs — confirm against the dispatching lexer.
#[derive(Debug, Clone, Copy)]
pub(super) struct PrivateIdentifier;

impl PrivateIdentifier {
    /// Creates a new private identifier lexer.
    pub(super) const fn new() -> Self {
        Self
    }
}
|
||||
|
||||
impl<R> Tokenizer<R> for PrivateIdentifier {
    /// Lexes a private identifier name and interns it.
    ///
    /// Accepts either a `\u....` unicode escape or a regular identifier-start
    /// character as the first character of the name; anything else (or end of
    /// input) is a syntax error anchored at `start_pos`.
    fn lex(
        &mut self,
        cursor: &mut Cursor<R>,
        start_pos: Position,
        interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("PrivateIdentifier", "Lexing");

        if let Some(next_ch) = cursor.next_char()? {
            if let Ok(c) = char::try_from(next_ch) {
                match c {
                    // A `\u` escape can begin the identifier name; the
                    // identifier lexer handles decoding the escape.
                    '\\' if cursor.peek()? == Some(b'u') => {
                        let (name, _) = Identifier::take_identifier_name(cursor, start_pos, c)?;
                        Ok(Token::new(
                            TokenKind::PrivateIdentifier(interner.get_or_intern(name.as_str())),
                            Span::new(start_pos, cursor.pos()),
                        ))
                    }
                    // Common case: an ordinary identifier-start character.
                    _ if Identifier::is_identifier_start(c as u32) => {
                        let (name, _) = Identifier::take_identifier_name(cursor, start_pos, c)?;
                        Ok(Token::new(
                            TokenKind::PrivateIdentifier(interner.get_or_intern(name.as_str())),
                            Span::new(start_pos, cursor.pos()),
                        ))
                    }
                    _ => Err(Error::syntax(
                        "Abrupt end: Expecting private identifier",
                        start_pos,
                    )),
                }
            } else {
                // `next_char` returned a u32 that is not a valid Unicode
                // scalar value (e.g. a lone surrogate).
                Err(Error::syntax(
                    format!(
                        "unexpected utf-8 char '\\u{next_ch}' at line {}, column {}",
                        start_pos.line_number(),
                        start_pos.column_number()
                    ),
                    start_pos,
                ))
            }
        } else {
            // End of input right after the `#`.
            Err(Error::syntax(
                "Abrupt end: Expecting private identifier",
                start_pos,
            ))
        }
    }
}
|
||||
239
javascript-engine/external/boa/boa_parser/src/lexer/regex.rs
vendored
Normal file
239
javascript-engine/external/boa/boa_parser/src/lexer/regex.rs
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
//! Boa's lexing for ECMAScript regex literals.
|
||||
|
||||
use crate::lexer::{Cursor, Error, Span, Token, TokenKind, Tokenizer};
|
||||
use bitflags::bitflags;
|
||||
use boa_ast::Position;
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use regress::Regex;
|
||||
use std::{
|
||||
io::{self, ErrorKind, Read},
|
||||
str::{self, FromStr},
|
||||
};
|
||||
|
||||
/// Regex literal lexing.
///
/// Lexes Division, Assigndiv or Regex literal.
///
/// Expects: Initial '/' to already be consumed by cursor.
///
/// More information:
/// - [ECMAScript reference][spec]
/// - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#sec-literals-regular-expression-literals
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions
// Stateless marker type; all work happens in the `Tokenizer` impl below.
#[derive(Debug, Clone, Copy)]
pub(super) struct RegexLiteral;
|
||||
|
||||
impl<R> Tokenizer<R> for RegexLiteral {
    /// Lexes a regular expression literal body and its flags.
    ///
    /// Collects raw bytes until an unescaped `/` outside a character class,
    /// then reads the trailing flag letters, validates the whole literal by
    /// compiling it with `regress`, and interns body and flags separately.
    fn lex(
        &mut self,
        cursor: &mut Cursor<R>,
        start_pos: Position,
        interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("RegexLiteral", "Lexing");

        let mut body = Vec::new();
        // Tracks whether we are inside a `[...]` character class, where `/`
        // does not terminate the literal.
        let mut is_class_char = false;

        // Lex RegularExpressionBody.
        loop {
            match cursor.next_byte()? {
                None => {
                    // Abrupt end.
                    return Err(Error::syntax(
                        "abrupt end on regular expression",
                        cursor.pos(),
                    ));
                }
                Some(b) => {
                    match b {
                        b'/' if !is_class_char => break, // RegularExpressionBody finished.
                        b'[' => {
                            is_class_char = true;
                            body.push(b);
                        }
                        b']' if is_class_char => {
                            is_class_char = false;
                            body.push(b);
                        }
                        b'\n' | b'\r' => {
                            // Not allowed in Regex literal.
                            return Err(Error::syntax(
                                "new lines are not allowed in regular expressions",
                                cursor.pos(),
                            ));
                        }
                        0xE2 if (cursor.peek_n(2)? == [0x80, 0xA8]
                            || cursor.peek_n(2)? == [0x80, 0xA9]) =>
                        {
                            // '\u{2028}' (e2 80 a8) and '\u{2029}' (e2 80 a9) are not allowed
                            return Err(Error::syntax(
                                "new lines are not allowed in regular expressions",
                                cursor.pos(),
                            ));
                        }
                        b'\\' => {
                            // Escape sequence: keep the backslash and the
                            // escaped byte verbatim in the raw body.
                            body.push(b'\\');
                            if let Some(sc) = cursor.next_byte()? {
                                match sc {
                                    b'\n' | b'\r' => {
                                        // Not allowed in Regex literal.
                                        return Err(Error::syntax(
                                            "new lines are not allowed in regular expressions",
                                            cursor.pos(),
                                        ));
                                    }
                                    0xE2 if (cursor.peek_n(2)? == [0x80, 0xA8]
                                        || cursor.peek_n(2)? == [0x80, 0xA9]) =>
                                    {
                                        // '\u{2028}' (e2 80 a8) and '\u{2029}' (e2 80 a9) are not allowed
                                        return Err(Error::syntax(
                                            "new lines are not allowed in regular expressions",
                                            cursor.pos(),
                                        ));
                                    }
                                    b => body.push(b),
                                }
                            } else {
                                // Abrupt end of regex.
                                return Err(Error::syntax(
                                    "abrupt end on regular expression",
                                    cursor.pos(),
                                ));
                            }
                        }
                        _ => body.push(b),
                    }
                }
            }
        }

        // Lex the flag letters that directly follow the closing `/`.
        let mut flags = Vec::new();
        let flags_start = cursor.pos();
        cursor.take_while_ascii_pred(&mut flags, &char::is_alphabetic)?;

        // SAFETY: `take_while_ascii_pred` (per its name) only pushes ASCII
        // bytes into `flags`, and ASCII is always valid UTF-8.
        // NOTE(review): confirm that invariant against the Cursor impl.
        let flags_str = unsafe { str::from_utf8_unchecked(flags.as_slice()) };
        if let Ok(body_str) = str::from_utf8(body.as_slice()) {
            // Compile once with `regress` purely to validate the literal; the
            // compiled regex is discarded.
            if let Err(error) = Regex::with_flags(body_str, flags_str) {
                return Err(Error::Syntax(
                    format!("Invalid regular expression literal: {error}").into(),
                    start_pos,
                ));
            }

            Ok(Token::new(
                TokenKind::regular_expression_literal(
                    interner.get_or_intern(body_str),
                    parse_regex_flags(flags_str, flags_start, interner)?,
                ),
                Span::new(start_pos, cursor.pos()),
            ))
        } else {
            Err(Error::from(io::Error::new(
                ErrorKind::InvalidData,
                "Invalid UTF-8 character in regular expressions",
            )))
        }
    }
}
|
||||
|
||||
bitflags! {
    /// Flags of a regular expression.
    ///
    /// Each bit corresponds to one single-letter ECMAScript flag:
    /// `g`, `i`, `m`, `s`, `u`, `y`, `d` respectively.
    #[derive(Default)]
    pub struct RegExpFlags: u8 {
        /// Whether to test the regular expression against all possible matches in a string,
        /// or only against the first. (`g`)
        const GLOBAL = 0b0000_0001;

        /// Whether to ignore case while attempting a match in a string. (`i`)
        const IGNORE_CASE = 0b0000_0010;

        /// Whether or not to search in strings across multiple lines. (`m`)
        const MULTILINE = 0b0000_0100;

        /// Whether `.` matches newlines or not. (`s`)
        const DOT_ALL = 0b0000_1000;

        /// Whether or not Unicode features are enabled. (`u`)
        const UNICODE = 0b0001_0000;

        /// Whether or not the search is sticky. (`y`)
        const STICKY = 0b0010_0000;

        /// Whether the regular expression result exposes the start and end indices of
        /// captured substrings. (`d`)
        const HAS_INDICES = 0b0100_0000;
    }
}
|
||||
|
||||
impl FromStr for RegExpFlags {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut flags = Self::default();
|
||||
for c in s.bytes() {
|
||||
let new_flag = match c {
|
||||
b'g' => Self::GLOBAL,
|
||||
b'i' => Self::IGNORE_CASE,
|
||||
b'm' => Self::MULTILINE,
|
||||
b's' => Self::DOT_ALL,
|
||||
b'u' => Self::UNICODE,
|
||||
b'y' => Self::STICKY,
|
||||
b'd' => Self::HAS_INDICES,
|
||||
_ => return Err(format!("invalid regular expression flag {}", char::from(c))),
|
||||
};
|
||||
|
||||
if flags.contains(new_flag) {
|
||||
return Err(format!(
|
||||
"repeated regular expression flag {}",
|
||||
char::from(c)
|
||||
));
|
||||
}
|
||||
flags.insert(new_flag);
|
||||
}
|
||||
|
||||
Ok(flags)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_regex_flags(s: &str, start: Position, interner: &mut Interner) -> Result<Sym, Error> {
|
||||
match RegExpFlags::from_str(s) {
|
||||
Err(message) => Err(Error::Syntax(message.into(), start)),
|
||||
Ok(flags) => Ok(interner.get_or_intern(flags.to_string().as_str())),
|
||||
}
|
||||
}
|
||||
|
||||
impl ToString for RegExpFlags {
|
||||
fn to_string(&self) -> String {
|
||||
let mut s = String::new();
|
||||
if self.contains(Self::HAS_INDICES) {
|
||||
s.push('d');
|
||||
}
|
||||
if self.contains(Self::GLOBAL) {
|
||||
s.push('g');
|
||||
}
|
||||
if self.contains(Self::IGNORE_CASE) {
|
||||
s.push('i');
|
||||
}
|
||||
if self.contains(Self::MULTILINE) {
|
||||
s.push('m');
|
||||
}
|
||||
if self.contains(Self::DOT_ALL) {
|
||||
s.push('s');
|
||||
}
|
||||
if self.contains(Self::UNICODE) {
|
||||
s.push('u');
|
||||
}
|
||||
if self.contains(Self::STICKY) {
|
||||
s.push('y');
|
||||
}
|
||||
s
|
||||
}
|
||||
}
|
||||
61
javascript-engine/external/boa/boa_parser/src/lexer/spread.rs
vendored
Normal file
61
javascript-engine/external/boa/boa_parser/src/lexer/spread.rs
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
//! Boa's lexing for ECMAScript spread (...) literals.
|
||||
|
||||
use crate::lexer::{Cursor, Error, Token, Tokenizer};
|
||||
use boa_ast::{Position, Punctuator, Span};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Spread literal lexing.
///
/// Note: expects the initial `.` to already be consumed from the cursor; `lex`
/// then reads up to two further `.` bytes to distinguish `.` from `...`.
///
/// More information:
/// - [ECMAScript reference][spec]
/// - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#prod-SpreadElement
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Spread_syntax
#[derive(Debug, Clone, Copy)]
pub(super) struct SpreadLiteral;

impl SpreadLiteral {
    /// Creates a new spread literal lexer.
    pub(super) const fn new() -> Self {
        Self
    }
}
|
||||
|
||||
impl<R> Tokenizer<R> for SpreadLiteral {
|
||||
fn lex(
|
||||
&mut self,
|
||||
cursor: &mut Cursor<R>,
|
||||
start_pos: Position,
|
||||
_interner: &mut Interner,
|
||||
) -> Result<Token, Error>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
let _timer = Profiler::global().start_event("SpreadLiteral", "Lexing");
|
||||
|
||||
// . or ...
|
||||
if cursor.next_is(b'.')? {
|
||||
if cursor.next_is(b'.')? {
|
||||
Ok(Token::new(
|
||||
Punctuator::Spread.into(),
|
||||
Span::new(start_pos, cursor.pos()),
|
||||
))
|
||||
} else {
|
||||
Err(Error::syntax(
|
||||
"Expecting Token '.' as part of spread",
|
||||
cursor.pos(),
|
||||
))
|
||||
}
|
||||
} else {
|
||||
Ok(Token::new(
|
||||
Punctuator::Dot.into(),
|
||||
Span::new(start_pos, cursor.pos()),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
344
javascript-engine/external/boa/boa_parser/src/lexer/string.rs
vendored
Normal file
344
javascript-engine/external/boa/boa_parser/src/lexer/string.rs
vendored
Normal file
@@ -0,0 +1,344 @@
|
||||
//! Boa's lexing for ECMAScript string literals.
|
||||
|
||||
use crate::lexer::{Cursor, Error, Token, TokenKind, Tokenizer};
|
||||
use boa_ast::{Position, Span};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::{
|
||||
io::{self, ErrorKind, Read},
|
||||
str,
|
||||
};
|
||||
|
||||
/// String literal lexing.
///
/// Note: expects for the initializer `'` or `"` to already be consumed from the cursor.
///
/// More information:
/// - [ECMAScript reference][spec]
/// - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#sec-literals-string-literals
/// [mdn]: https://developer.cdn.mozilla.net/en-US/docs/Web/JavaScript/Reference/Global_Objects/String
#[derive(Debug, Clone, Copy)]
pub(super) struct StringLiteral {
    /// The quote character that closes this literal.
    terminator: StringTerminator,
}
|
||||
|
||||
impl StringLiteral {
|
||||
/// Creates a new string literal lexer.
|
||||
pub(super) fn new(init: char) -> Self {
|
||||
let terminator = match init {
|
||||
'\'' => StringTerminator::SingleQuote,
|
||||
'"' => StringTerminator::DoubleQuote,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
Self { terminator }
|
||||
}
|
||||
}
|
||||
|
||||
/// Terminator for the string.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum StringTerminator {
    /// The literal was opened with `'` and ends at the next unescaped `'`.
    SingleQuote,
    /// The literal was opened with `"` and ends at the next unescaped `"`.
    DoubleQuote,
}
|
||||
|
||||
/// Extends a buffer type to store UTF-16 code units and convert to string.
pub(crate) trait UTF16CodeUnitsBuffer {
    /// Encodes the code point to UTF-16 code units and push to the buffer.
    fn push_code_point(&mut self, code_point: u32);

    /// Decodes the buffer into a String and replace the invalid data with the replacement character (U+FFFD).
    fn to_string_lossy(&self) -> String;
}

impl UTF16CodeUnitsBuffer for Vec<u16> {
    fn push_code_point(&mut self, code_point: u32) {
        // Code points in the Basic Multilingual Plane fit in a single unit;
        // everything above U+FFFF is split into a surrogate pair.
        match u16::try_from(code_point) {
            Ok(unit) => self.push(unit),
            Err(_) => {
                let cp = code_point - 0x10000;
                let high: u16 = (cp / 1024 + 0xD800)
                    .try_into()
                    .expect("decoded an u32 into two u16.");
                let low: u16 = (cp % 1024 + 0xDC00)
                    .try_into()
                    .expect("decoded an u32 into two u16.");
                self.extend_from_slice(&[high, low]);
            }
        }
    }

    fn to_string_lossy(&self) -> String {
        String::from_utf16_lossy(self)
    }
}
|
||||
|
||||
impl<R> Tokenizer<R> for StringLiteral {
|
||||
fn lex(
|
||||
&mut self,
|
||||
cursor: &mut Cursor<R>,
|
||||
start_pos: Position,
|
||||
interner: &mut Interner,
|
||||
) -> Result<Token, Error>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
let _timer = Profiler::global().start_event("StringLiteral", "Lexing");
|
||||
|
||||
let (lit, span) =
|
||||
Self::take_string_characters(cursor, start_pos, self.terminator, cursor.strict_mode())?;
|
||||
|
||||
Ok(Token::new(
|
||||
TokenKind::string_literal(interner.get_or_intern(&lit[..])),
|
||||
span,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl StringLiteral {
    /// Checks if a character is `LineTerminator` as per ECMAScript standards.
    ///
    /// More information:
    /// - [ECMAScript reference][spec]
    ///
    /// [spec]: https://tc39.es/ecma262/#prod-LineTerminator
    pub(super) const fn is_line_terminator(ch: u32) -> bool {
        matches!(
            ch,
            0x000A /* <LF> */ | 0x000D /* <CR> */ | 0x2028 /* <LS> */ | 0x2029 /* <PS> */
        )
    }

    /// Reads the body of a string literal into UTF-16 code units until the
    /// matching terminator quote, decoding escape sequences along the way.
    ///
    /// Returns the code units and the span from `start_pos` to the position
    /// just after the closing quote. Errors on an unterminated literal or an
    /// invalid escape sequence.
    fn take_string_characters<R>(
        cursor: &mut Cursor<R>,
        start_pos: Position,
        terminator: StringTerminator,
        is_strict_mode: bool,
    ) -> Result<(Vec<u16>, Span), Error>
    where
        R: Read,
    {
        let mut buf = Vec::new();
        loop {
            let ch_start_pos = cursor.pos();
            let ch = cursor.next_char()?;

            match ch {
                // Only the quote matching the opening quote terminates.
                Some(0x0027 /* ' */) if terminator == StringTerminator::SingleQuote => break,
                Some(0x0022 /* " */) if terminator == StringTerminator::DoubleQuote => break,
                Some(0x005C /* \ */) => {
                    let _timer =
                        Profiler::global().start_event("StringLiteral - escape sequence", "Lexing");

                    // A line continuation yields `None` and contributes
                    // nothing to the buffer.
                    if let Some(escape_value) = Self::take_escape_sequence_or_line_continuation(
                        cursor,
                        ch_start_pos,
                        is_strict_mode,
                        false,
                    )? {
                        buf.push_code_point(escape_value);
                    }
                }
                // <LS> and <PS> are allowed verbatim inside string literals.
                Some(0x2028) => buf.push(0x2028 /* <LS> */),
                Some(0x2029) => buf.push(0x2029 /* <PS> */),
                Some(ch) if !Self::is_line_terminator(ch) => {
                    buf.push_code_point(ch);
                }
                // Unescaped <LF>/<CR> or end of input: unterminated literal.
                _ => {
                    return Err(Error::from(io::Error::new(
                        ErrorKind::UnexpectedEof,
                        "unterminated string literal",
                    )));
                }
            }
        }

        Ok((buf, Span::new(start_pos, cursor.pos())))
    }

    /// Decodes the escape sequence following a consumed `\`.
    ///
    /// Returns `Ok(Some(code_point))` for a decoded escape, `Ok(None)` for a
    /// line continuation (which contributes nothing), and an error for
    /// escapes that are illegal in the current context (strict mode or
    /// template literal).
    pub(super) fn take_escape_sequence_or_line_continuation<R>(
        cursor: &mut Cursor<R>,
        start_pos: Position,
        is_strict_mode: bool,
        is_template_literal: bool,
    ) -> Result<Option<u32>, Error>
    where
        R: Read,
    {
        let escape_ch = cursor.next_char()?.ok_or_else(|| {
            Error::from(io::Error::new(
                ErrorKind::UnexpectedEof,
                "unterminated escape sequence in literal",
            ))
        })?;

        let escape_value = match escape_ch {
            // Single-character escapes.
            0x0062 /* b */ => Some(0x0008 /* <BS> */),
            0x0074 /* t */ => Some(0x0009 /* <HT> */),
            0x006E /* n */ => Some(0x000A /* <LF> */),
            0x0076 /* v */ => Some(0x000B /* <VT> */),
            0x0066 /* f */ => Some(0x000C /* <FF> */),
            0x0072 /* r */ => Some(0x000D /* <CR> */),
            0x0022 /* " */ => Some(0x0022 /* " */),
            0x0027 /* ' */ => Some(0x0027 /* ' */),
            0x005C /* \ */ => Some(0x005C /* \ */),
            // `\0` is NUL only when NOT followed by another digit; otherwise
            // it falls through to the legacy-octal branch below.
            0x0030 /* 0 */ if cursor
                .peek()?
                .filter(|next_byte| (b'0'..=b'9').contains(next_byte))
                .is_none() =>
                Some(0x0000 /* NULL */),
            0x0078 /* x */ => {
                Some(Self::take_hex_escape_sequence(cursor, start_pos)?)
            }
            0x0075 /* u */ => {
                Some(Self::take_unicode_escape_sequence(cursor, start_pos)?)
            }
            0x0038 /* 8 */ | 0x0039 /* 9 */ => {
                // Grammar: NonOctalDecimalEscapeSequence
                if is_template_literal {
                    return Err(Error::syntax(
                        "\\8 and \\9 are not allowed in template literal",
                        start_pos,
                    ));
                } else if is_strict_mode {
                    return Err(Error::syntax(
                        "\\8 and \\9 are not allowed in strict mode",
                        start_pos,
                    ));
                }
                // Sloppy mode: `\8`/`\9` are the digit characters themselves.
                Some(escape_ch)
            }
            _ if (0x0030..=0x0037 /* '0'..='7' */).contains(&escape_ch) => {
                if is_template_literal {
                    return Err(Error::syntax(
                        "octal escape sequences are not allowed in template literal",
                        start_pos,
                    ));
                }

                if is_strict_mode {
                    return Err(Error::syntax(
                        "octal escape sequences are not allowed in strict mode",
                        start_pos,
                    ));
                }

                Some(Self::take_legacy_octal_escape_sequence(
                    cursor,
                    escape_ch.try_into().expect("an ascii char must not fail to convert"),
                )?)
            }
            _ if Self::is_line_terminator(escape_ch) => {
                // Grammar: LineContinuation
                // Grammar: \ LineTerminatorSequence
                // LineContinuation is the empty String.
                None
            }
            // Any other escaped character stands for itself.
            _ => {
                Some(escape_ch)
            }
        };

        Ok(escape_value)
    }

    /// Decodes a unicode escape sequence after a consumed `\u`.
    ///
    /// Supports both the `\u{X..X}` code-point form and the fixed `\uXXXX`
    /// Hex4Digits form. Errors on malformed hex or a code point above
    /// U+10FFFF.
    pub(super) fn take_unicode_escape_sequence<R>(
        cursor: &mut Cursor<R>,
        start_pos: Position,
    ) -> Result<u32, Error>
    where
        R: Read,
    {
        // Support \u{X..X} (Unicode CodePoint)
        if cursor.next_is(b'{')? {
            // TODO: use bytes for a bit better performance (using stack)
            let mut code_point_buf = Vec::with_capacity(6);
            cursor.take_until(b'}', &mut code_point_buf)?;

            let code_point = str::from_utf8(code_point_buf.as_slice())
                .ok()
                .and_then(|code_point_str| {
                    // The `code_point_str` should represent a single unicode codepoint, convert to u32
                    u32::from_str_radix(code_point_str, 16).ok()
                })
                .ok_or_else(|| {
                    Error::syntax("malformed Unicode character escape sequence", start_pos)
                })?;

            // UTF16Encoding of a numeric code point value
            if code_point > 0x10_FFFF {
                return Err(Error::syntax(
                    "Unicode codepoint must not be greater than 0x10FFFF in escape sequence",
                    start_pos,
                ));
            }

            Ok(code_point)
        } else {
            // Grammar: Hex4Digits
            // Collect each character after \u e.g \uD83D will give "D83D"
            let mut code_point_utf8_bytes = [0u8; 4];
            cursor.fill_bytes(&mut code_point_utf8_bytes)?;

            // Convert to u16
            let code_point = str::from_utf8(&code_point_utf8_bytes)
                .ok()
                .and_then(|code_point_str| u16::from_str_radix(code_point_str, 16).ok())
                .ok_or_else(|| Error::syntax("invalid Unicode escape sequence", start_pos))?;

            Ok(u32::from(code_point))
        }
    }

    /// Decodes a `\xHH` hex escape sequence (two hex digits) after a
    /// consumed `\x`.
    fn take_hex_escape_sequence<R>(
        cursor: &mut Cursor<R>,
        start_pos: Position,
    ) -> Result<u32, Error>
    where
        R: Read,
    {
        let mut code_point_utf8_bytes = [0u8; 2];
        cursor.fill_bytes(&mut code_point_utf8_bytes)?;
        let code_point = str::from_utf8(&code_point_utf8_bytes)
            .ok()
            .and_then(|code_point_str| u16::from_str_radix(code_point_str, 16).ok())
            .ok_or_else(|| Error::syntax("invalid Hexadecimal escape sequence", start_pos))?;

        Ok(u32::from(code_point))
    }

    /// Decodes a legacy (sloppy-mode) octal escape, starting from the
    /// already-consumed first octal digit `init_byte`.
    ///
    /// Consumes at most two more octal digits; a third digit is only taken
    /// when the first digit is 0-3, keeping the value within one byte.
    fn take_legacy_octal_escape_sequence<R>(
        cursor: &mut Cursor<R>,
        init_byte: u8,
    ) -> Result<u32, Error>
    where
        R: Read,
    {
        // Grammar: OctalDigit
        let mut code_point = u32::from(init_byte - b'0');

        // Grammar: ZeroToThree OctalDigit
        // Grammar: FourToSeven OctalDigit
        if let Some(byte) = cursor.peek()? {
            if (b'0'..=b'7').contains(&byte) {
                let _ = cursor.next_byte()?;
                code_point = (code_point * 8) + u32::from(byte - b'0');

                if (b'0'..=b'3').contains(&init_byte) {
                    // Grammar: ZeroToThree OctalDigit OctalDigit
                    if let Some(byte) = cursor.peek()? {
                        if (b'0'..=b'7').contains(&byte) {
                            let _ = cursor.next_byte()?;
                            code_point = (code_point * 8) + u32::from(byte - b'0');
                        }
                    }
                }
            }
        }

        Ok(code_point)
    }
}
|
||||
161
javascript-engine/external/boa/boa_parser/src/lexer/template.rs
vendored
Normal file
161
javascript-engine/external/boa/boa_parser/src/lexer/template.rs
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
//! Boa's lexing for ECMAScript template literals.
|
||||
|
||||
use crate::lexer::{
|
||||
string::{StringLiteral, UTF16CodeUnitsBuffer},
|
||||
Cursor, Error, Token, TokenKind, Tokenizer,
|
||||
};
|
||||
use boa_ast::{Position, Span};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::{self, ErrorKind, Read};
|
||||
|
||||
/// The raw (uncooked) contents of one template literal segment, plus the
/// position needed to report errors when cooking it later.
#[cfg_attr(feature = "deser", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct TemplateString {
    /// The interned raw template segment (escape sequences still undecoded).
    raw: Sym,
    /// The start position of the template string. Used to make lexer error if `to_owned_cooked`
    /// failed.
    start_pos: Position,
}
|
||||
|
||||
impl TemplateString {
    /// Creates a new `TemplateString` with the given raw template and start position.
    pub const fn new(raw: Sym, start_pos: Position) -> Self {
        Self { raw, start_pos }
    }

    /// Returns the interned symbol of the raw (uncooked) template string.
    ///
    /// More information:
    /// - [ECMAScript reference][spec]
    ///
    /// [spec]: https://tc39.es/ecma262/#sec-static-semantics-templatestrings
    pub const fn as_raw(self) -> Sym {
        self.raw
    }

    /// Creates a new cooked template string. Returns a lexer error if it fails to cook the
    /// template string.
    ///
    /// More information:
    /// - [ECMAScript reference][spec]
    ///
    /// [spec]: https://tc39.es/ecma262/#sec-static-semantics-templatestrings
    pub fn to_owned_cooked(self, interner: &mut Interner) -> Result<Sym, Error> {
        // Re-lex the stored raw text through a fresh cursor positioned at the
        // segment's original location so error positions stay accurate.
        let string = interner.resolve_expect(self.raw).to_string();
        let mut cursor = Cursor::with_position(string.as_bytes(), self.start_pos);
        let mut buf: Vec<u16> = Vec::new();

        loop {
            let ch_start_pos = cursor.pos();
            let ch = cursor.next_char()?;

            match ch {
                Some(0x005C /* \ */) => {
                    // Both flags are `true`: template-literal cooking always
                    // applies strict-mode escape rules and template-specific
                    // restrictions (no octal, no \8/\9).
                    let escape_value = StringLiteral::take_escape_sequence_or_line_continuation(
                        &mut cursor,
                        ch_start_pos,
                        true,
                        true,
                    )?;

                    if let Some(escape_value) = escape_value {
                        buf.push_code_point(escape_value);
                    }
                }
                Some(ch) => {
                    // The caller guarantees that sequences '`' and '${' never appear
                    // LineTerminatorSequence <CR> <LF> is consumed by `cursor.next_char()` and
                    // returns <LF>, which matches the TV of <CR> <LF>
                    buf.push_code_point(ch);
                }
                None => break,
            }
        }

        Ok(interner.get_or_intern(&buf[..]))
    }
}
|
||||
|
||||
/// Template literal lexing.
///
/// Expects: Initial `` ` `` to already be consumed by cursor.
///
/// More information:
/// - [ECMAScript reference][spec]
/// - [MDN documentation][mdn]
///
/// [spec]: https://tc39.es/ecma262/#sec-template-literals
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals
// Stateless marker type; all work happens in the `Tokenizer` impl below.
#[derive(Debug, Clone, Copy)]
pub(super) struct TemplateLiteral;
|
||||
|
||||
impl<R> Tokenizer<R> for TemplateLiteral {
    /// Lexes one raw template segment, stopping at `` ` `` (end of template)
    /// or `${` (start of a substitution).
    ///
    /// The segment is stored *raw*: escape sequences are kept verbatim in the
    /// buffer and only decoded later by `TemplateString::to_owned_cooked`.
    fn lex(
        &mut self,
        cursor: &mut Cursor<R>,
        start_pos: Position,
        interner: &mut Interner,
    ) -> Result<Token, Error>
    where
        R: Read,
    {
        let _timer = Profiler::global().start_event("TemplateLiteral", "Lexing");

        let mut buf = Vec::new();
        loop {
            let ch = cursor.next_char()?.ok_or_else(|| {
                Error::from(io::Error::new(
                    ErrorKind::UnexpectedEof,
                    "unterminated template literal",
                ))
            })?;

            match ch {
                // `
                0x0060 => {
                    let raw_sym = interner.get_or_intern(&buf[..]);
                    let template_string = TemplateString::new(raw_sym, start_pos);

                    return Ok(Token::new(
                        TokenKind::template_no_substitution(template_string),
                        Span::new(start_pos, cursor.pos()),
                    ));
                }
                // $
                0x0024 if cursor.next_is(b'{')? => {
                    let raw_sym = interner.get_or_intern(&buf[..]);
                    let template_string = TemplateString::new(raw_sym, start_pos);

                    return Ok(Token::new(
                        TokenKind::template_middle(template_string),
                        Span::new(start_pos, cursor.pos()),
                    ));
                }
                // \
                0x005C => {
                    let escape_ch = cursor.peek()?.ok_or_else(|| {
                        Error::from(io::Error::new(
                            ErrorKind::UnexpectedEof,
                            "unterminated escape sequence in literal",
                        ))
                    })?;

                    buf.push(u16::from(b'\\'));
                    match escape_ch {
                        // An escaped `` ` ``, `$` or `\` must be consumed now
                        // so it cannot terminate the segment or start `${`.
                        b'`' | b'$' | b'\\' => {
                            let next_byte =
                                cursor.next_byte()?.expect("already checked next character");
                            buf.push(u16::from(next_byte));
                        }
                        // Any other escape stays in the stream and is copied
                        // raw by subsequent iterations; cooking decodes it.
                        _ => continue,
                    }
                }
                ch => {
                    buf.push_code_point(ch);
                }
            }
        }
    }
}
|
||||
1153
javascript-engine/external/boa/boa_parser/src/lexer/tests.rs
vendored
Normal file
1153
javascript-engine/external/boa/boa_parser/src/lexer/tests.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
260
javascript-engine/external/boa/boa_parser/src/lexer/token.rs
vendored
Normal file
260
javascript-engine/external/boa/boa_parser/src/lexer/token.rs
vendored
Normal file
@@ -0,0 +1,260 @@
|
||||
//! Boa's implementation of all ECMAScript [Token]s.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript reference][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-tokens
|
||||
|
||||
use crate::lexer::template::TemplateString;
|
||||
use boa_ast::{Keyword, Punctuator, Span};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use num_bigint::BigInt;
|
||||
|
||||
/// This represents the smallest individual words, phrases, or characters that JavaScript can understand.
///
/// More information:
/// - [ECMAScript reference][spec]
///
/// [spec]: https://tc39.es/ecma262/#sec-tokens
#[cfg_attr(feature = "deser", derive(serde::Serialize, serde::Deserialize))]
#[derive(Debug, Clone, PartialEq)]
pub struct Token {
    /// The token kind, which contains the actual data of the token.
    kind: TokenKind,
    /// The token position in the original source code.
    span: Span,
}
|
||||
|
||||
impl Token {
|
||||
/// Create a new detailed token from the token data, line number and column number
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub const fn new(kind: TokenKind, span: Span) -> Self {
|
||||
Self { kind, span }
|
||||
}
|
||||
|
||||
/// Gets the kind of the token.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub const fn kind(&self) -> &TokenKind {
|
||||
&self.kind
|
||||
}
|
||||
|
||||
/// Gets the token span in the original source code.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
pub const fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
/// Converts the token to a `String`.
|
||||
pub(crate) fn to_string(&self, interner: &Interner) -> String {
|
||||
self.kind.to_string(interner)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the different types of numeric literals.
#[cfg_attr(feature = "deser", derive(serde::Serialize, serde::Deserialize))]
#[derive(Clone, PartialEq, Debug)]
pub enum Numeric {
    /// A floating point number
    Rational(f64),

    /// An integer
    Integer(i32),

    /// A BigInt
    // Boxed so the enum stays small; `BigInt` is a heap-backed arbitrary
    // precision integer.
    BigInt(Box<BigInt>),
}
|
||||
|
||||
impl From<f64> for Numeric {
|
||||
#[inline]
|
||||
fn from(n: f64) -> Self {
|
||||
Self::Rational(n)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<i32> for Numeric {
|
||||
#[inline]
|
||||
fn from(n: i32) -> Self {
|
||||
Self::Integer(n)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BigInt> for Numeric {
|
||||
#[inline]
|
||||
fn from(n: BigInt) -> Self {
|
||||
Self::BigInt(Box::new(n))
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the type of Token and the data it has inside.
#[cfg_attr(feature = "deser", derive(serde::Serialize, serde::Deserialize))]
#[derive(Clone, PartialEq, Debug)]
pub enum TokenKind {
    /// A boolean literal, which is either `true` or `false`.
    BooleanLiteral(bool),

    /// The end of the file.
    EOF,

    /// An identifier.
    Identifier(Sym),

    /// A private identifier (e.g. `#field`).
    PrivateIdentifier(Sym),

    /// A keyword and a flag if the keyword contains unicode escaped chars.
    Keyword((Keyword, bool)),

    /// A `null` literal.
    NullLiteral,

    /// A numeric literal.
    NumericLiteral(Numeric),

    /// A piece of punctuation
    Punctuator(Punctuator),

    /// A string literal.
    StringLiteral(Sym),

    /// A part of a template literal without substitution.
    TemplateNoSubstitution(TemplateString),

    /// The part of a template literal between substitutions
    TemplateMiddle(TemplateString),

    /// A regular expression, consisting of body and flags.
    RegularExpressionLiteral(Sym, Sym),

    /// Indicates the end of a line (`\n`).
    LineTerminator,

    /// Indicates a comment, the content isn't stored.
    Comment,
}
|
||||
|
||||
impl From<bool> for TokenKind {
|
||||
fn from(oth: bool) -> Self {
|
||||
Self::BooleanLiteral(oth)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(Keyword, bool)> for TokenKind {
    /// Converts a keyword (plus its "contains unicode escapes" flag) into a
    /// [`TokenKind::Keyword`].
    // `#[inline]` added for consistency with the `From` impls for `Numeric`.
    #[inline]
    fn from(kw: (Keyword, bool)) -> Self {
        Self::Keyword(kw)
    }
}
|
||||
|
||||
impl From<Punctuator> for TokenKind {
    /// Converts a punctuator into a [`TokenKind::Punctuator`].
    // `#[inline]` added for consistency with the `From` impls for `Numeric`.
    #[inline]
    fn from(punc: Punctuator) -> Self {
        Self::Punctuator(punc)
    }
}
|
||||
|
||||
impl From<Numeric> for TokenKind {
    /// Converts a numeric literal into a [`TokenKind::NumericLiteral`].
    // `#[inline]` added for consistency with the `From` impls for `Numeric`.
    #[inline]
    fn from(num: Numeric) -> Self {
        Self::NumericLiteral(num)
    }
}
|
||||
|
||||
impl TokenKind {
|
||||
/// Creates a `BooleanLiteral` token kind.
|
||||
#[must_use]
|
||||
pub const fn boolean_literal(lit: bool) -> Self {
|
||||
Self::BooleanLiteral(lit)
|
||||
}
|
||||
|
||||
/// Creates an `EOF` token kind.
|
||||
#[must_use]
|
||||
pub const fn eof() -> Self {
|
||||
Self::EOF
|
||||
}
|
||||
|
||||
/// Creates an `Identifier` token type.
|
||||
#[must_use]
|
||||
pub const fn identifier(ident: Sym) -> Self {
|
||||
Self::Identifier(ident)
|
||||
}
|
||||
|
||||
/// Creates a `NumericLiteral` token kind.
|
||||
pub fn numeric_literal<L>(lit: L) -> Self
|
||||
where
|
||||
L: Into<Numeric>,
|
||||
{
|
||||
Self::NumericLiteral(lit.into())
|
||||
}
|
||||
|
||||
/// Creates a `Punctuator` token type.
|
||||
#[must_use]
|
||||
pub const fn punctuator(punc: Punctuator) -> Self {
|
||||
Self::Punctuator(punc)
|
||||
}
|
||||
|
||||
/// Creates a `StringLiteral` token type.
|
||||
#[must_use]
|
||||
pub const fn string_literal(lit: Sym) -> Self {
|
||||
Self::StringLiteral(lit)
|
||||
}
|
||||
|
||||
/// Creates a `TemplateMiddle` token type.
|
||||
#[must_use]
|
||||
pub const fn template_middle(template_string: TemplateString) -> Self {
|
||||
Self::TemplateMiddle(template_string)
|
||||
}
|
||||
|
||||
/// Creates a `TemplateNoSubstitution` token type.
|
||||
#[must_use]
|
||||
pub const fn template_no_substitution(template_string: TemplateString) -> Self {
|
||||
Self::TemplateNoSubstitution(template_string)
|
||||
}
|
||||
|
||||
/// Creates a `RegularExpressionLiteral` token kind.
|
||||
#[must_use]
|
||||
pub const fn regular_expression_literal(body: Sym, flags: Sym) -> Self {
|
||||
Self::RegularExpressionLiteral(body, flags)
|
||||
}
|
||||
|
||||
/// Creates a `LineTerminator` token kind.
|
||||
#[must_use]
|
||||
pub const fn line_terminator() -> Self {
|
||||
Self::LineTerminator
|
||||
}
|
||||
|
||||
/// Creates a 'Comment' token kind.
|
||||
#[must_use]
|
||||
pub const fn comment() -> Self {
|
||||
Self::Comment
|
||||
}
|
||||
|
||||
/// Implements the `ToString` functionality for the `TokenKind`.
|
||||
#[must_use]
|
||||
pub fn to_string(&self, interner: &Interner) -> String {
|
||||
match *self {
|
||||
Self::BooleanLiteral(val) => val.to_string(),
|
||||
Self::EOF => "end of file".to_owned(),
|
||||
Self::Identifier(ident) => interner.resolve_expect(ident).to_string(),
|
||||
Self::PrivateIdentifier(ident) => format!("#{}", interner.resolve_expect(ident)),
|
||||
Self::Keyword((word, _)) => word.to_string(),
|
||||
Self::NullLiteral => "null".to_owned(),
|
||||
Self::NumericLiteral(Numeric::Rational(num)) => num.to_string(),
|
||||
Self::NumericLiteral(Numeric::Integer(num)) => num.to_string(),
|
||||
Self::NumericLiteral(Numeric::BigInt(ref num)) => format!("{num}n"),
|
||||
Self::Punctuator(punc) => punc.to_string(),
|
||||
Self::StringLiteral(lit) => interner.resolve_expect(lit).to_string(),
|
||||
Self::TemplateNoSubstitution(ts) | Self::TemplateMiddle(ts) => {
|
||||
interner.resolve_expect(ts.as_raw()).to_string()
|
||||
}
|
||||
Self::RegularExpressionLiteral(body, flags) => {
|
||||
format!(
|
||||
"/{}/{}",
|
||||
interner.resolve_expect(body),
|
||||
interner.resolve_expect(flags),
|
||||
)
|
||||
}
|
||||
Self::LineTerminator => "line terminator".to_owned(),
|
||||
Self::Comment => "comment".to_owned(),
|
||||
}
|
||||
}
|
||||
}
|
||||
101
javascript-engine/external/boa/boa_parser/src/lib.rs
vendored
Normal file
101
javascript-engine/external/boa/boa_parser/src/lib.rs
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
//! Boa's **`boa_parser`** crate is a parser targeting the latest [ECMAScript language specification][spec].
|
||||
//!
|
||||
//! # Crate Overview
|
||||
//! This crate contains implementations of a [`Lexer`] and a [`Parser`] for the **ECMAScript**
|
||||
//! language. The [lexical grammar][lex] and the [syntactic grammar][grammar] being targeted are
|
||||
//! fully defined in the specification. See the links provided for more information.
|
||||
//!
|
||||
//! # About Boa
|
||||
//! Boa is an open-source, experimental ECMAScript Engine written in Rust for lexing, parsing and executing ECMAScript/JavaScript. Currently, Boa
|
||||
//! supports some of the [language][boa-conformance]. More information can be viewed at [Boa's website][boa-web].
|
||||
//!
|
||||
//! Try out the most recent release with Boa's live demo [playground][boa-playground].
|
||||
//!
|
||||
//! # Boa Crates
|
||||
//! - **`boa_ast`** - Boa's ECMAScript Abstract Syntax Tree.
|
||||
//! - **`boa_engine`** - Boa's implementation of ECMAScript builtin objects and execution.
|
||||
//! - **`boa_gc`** - Boa's garbage collector.
|
||||
//! - **`boa_interner`** - Boa's string interner.
|
||||
//! - **`boa_parser`** - Boa's lexer and parser.
|
||||
//! - **`boa_profiler`** - Boa's code profiler.
|
||||
//! - **`boa_unicode`** - Boa's Unicode identifier.
|
||||
//! - **`boa_icu_provider`** - Boa's ICU4X data provider.
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262
|
||||
//! [lex]: https://tc39.es/ecma262/#sec-ecmascript-language-lexical-grammar
|
||||
//! [grammar]: https://tc39.es/ecma262/#sec-ecmascript-language-expressions
|
||||
//! [boa-conformance]: https://boa-dev.github.io/boa/test262/
|
||||
//! [boa-web]: https://boa-dev.github.io/
|
||||
//! [boa-playground]: https://boa-dev.github.io/boa/playground/
|
||||
|
||||
#![doc(
|
||||
html_logo_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg",
|
||||
html_favicon_url = "https://raw.githubusercontent.com/boa-dev/boa/main/assets/logo.svg"
|
||||
)]
|
||||
#![cfg_attr(not(test), forbid(clippy::unwrap_used))]
|
||||
#![warn(missing_docs, clippy::dbg_macro)]
|
||||
#![deny(
|
||||
// rustc lint groups https://doc.rust-lang.org/rustc/lints/groups.html
|
||||
warnings,
|
||||
future_incompatible,
|
||||
let_underscore,
|
||||
nonstandard_style,
|
||||
rust_2018_compatibility,
|
||||
rust_2018_idioms,
|
||||
rust_2021_compatibility,
|
||||
unused,
|
||||
|
||||
// rustc allowed-by-default lints https://doc.rust-lang.org/rustc/lints/listing/allowed-by-default.html
|
||||
macro_use_extern_crate,
|
||||
meta_variable_misuse,
|
||||
missing_abi,
|
||||
missing_copy_implementations,
|
||||
missing_debug_implementations,
|
||||
non_ascii_idents,
|
||||
noop_method_call,
|
||||
single_use_lifetimes,
|
||||
trivial_casts,
|
||||
trivial_numeric_casts,
|
||||
unreachable_pub,
|
||||
unsafe_op_in_unsafe_fn,
|
||||
unused_crate_dependencies,
|
||||
unused_import_braces,
|
||||
unused_lifetimes,
|
||||
unused_qualifications,
|
||||
unused_tuple_struct_fields,
|
||||
variant_size_differences,
|
||||
|
||||
// rustdoc lints https://doc.rust-lang.org/rustdoc/lints.html
|
||||
rustdoc::broken_intra_doc_links,
|
||||
rustdoc::private_intra_doc_links,
|
||||
rustdoc::missing_crate_level_docs,
|
||||
rustdoc::private_doc_tests,
|
||||
rustdoc::invalid_codeblock_attributes,
|
||||
rustdoc::invalid_rust_codeblocks,
|
||||
rustdoc::bare_urls,
|
||||
|
||||
// clippy categories https://doc.rust-lang.org/clippy/
|
||||
clippy::all,
|
||||
clippy::correctness,
|
||||
clippy::suspicious,
|
||||
clippy::style,
|
||||
clippy::complexity,
|
||||
clippy::perf,
|
||||
clippy::pedantic,
|
||||
clippy::nursery,
|
||||
)]
|
||||
#![allow(
|
||||
clippy::module_name_repetitions,
|
||||
clippy::too_many_lines,
|
||||
clippy::cognitive_complexity,
|
||||
clippy::let_unit_value,
|
||||
clippy::redundant_pub_crate
|
||||
)]
|
||||
|
||||
pub mod error;
|
||||
pub mod lexer;
|
||||
pub mod parser;
|
||||
|
||||
pub use error::Error;
|
||||
pub use lexer::Lexer;
|
||||
pub use parser::Parser;
|
||||
250
javascript-engine/external/boa/boa_parser/src/parser/cursor/buffered_lexer/mod.rs
vendored
Normal file
250
javascript-engine/external/boa/boa_parser/src/parser/cursor/buffered_lexer/mod.rs
vendored
Normal file
@@ -0,0 +1,250 @@
|
||||
use crate::{
|
||||
lexer::{InputElement, Lexer, Token, TokenKind},
|
||||
parser::ParseResult,
|
||||
Error,
|
||||
};
|
||||
use boa_ast::Position;
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
#[cfg(test)]
mod tests;

/// The maximum number of tokens which can be peeked ahead.
const MAX_PEEK_SKIP: usize = 3;

/// The fixed size of the buffer used for storing values that are peeked ahead.
///
/// The size is calculated for a worst case scenario, where we want to peek `MAX_PEEK_SKIP` tokens
/// skipping line terminators, and the stream ends just after:
/// ```text
/// [\n, B, \n, C, \n, D, \n, E, \n, F, None]
///   0  0   1  1   2  2   3  3   4  4     5
/// ```
// One line terminator may precede each of the `MAX_PEEK_SKIP + 1` peekable
// tokens, plus one extra slot for the trailing `None` end-of-stream marker.
const PEEK_BUF_SIZE: usize = (MAX_PEEK_SKIP + 1) * 2 + 1;
|
||||
|
||||
/// A lexer wrapper that buffers tokens, allowing the parser to peek ahead
/// without consuming, implemented as a fixed-size ring buffer.
#[derive(Debug)]
pub(super) struct BufferedLexer<R> {
    // The underlying lexer producing tokens.
    lexer: Lexer<R>,
    // Ring buffer of tokens lexed ahead of the parser's position.
    peeked: [Option<Token>; PEEK_BUF_SIZE],
    // Ring-buffer index of the next token to hand to the parser.
    read_index: usize,
    // Ring-buffer index where the next lexed token will be stored.
    write_index: usize,
}
|
||||
|
||||
impl<R> From<Lexer<R>> for BufferedLexer<R>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
fn from(lexer: Lexer<R>) -> Self {
|
||||
Self {
|
||||
lexer,
|
||||
peeked: [
|
||||
None::<Token>,
|
||||
None::<Token>,
|
||||
None::<Token>,
|
||||
None::<Token>,
|
||||
None::<Token>,
|
||||
None::<Token>,
|
||||
None::<Token>,
|
||||
None::<Token>,
|
||||
None::<Token>,
|
||||
],
|
||||
read_index: 0,
|
||||
write_index: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> From<R> for BufferedLexer<R>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
fn from(reader: R) -> Self {
|
||||
Lexer::new(reader).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> BufferedLexer<R>
where
    R: Read,
{
    /// Sets the goal symbol for the lexer.
    pub(super) fn set_goal(&mut self, elm: InputElement) {
        let _timer = Profiler::global().start_event("cursor::set_goal()", "Parsing");
        self.lexer.set_goal(elm);
    }

    /// Lexes the next tokens as a regex assuming that the starting '/' has already been consumed.
    pub(super) fn lex_regex(
        &mut self,
        start: Position,
        interner: &mut Interner,
    ) -> ParseResult<Token> {
        let _timer = Profiler::global().start_event("cursor::lex_regex()", "Parsing");
        self.set_goal(InputElement::RegExp);
        self.lexer
            .lex_slash_token(start, interner)
            .map_err(Into::into)
    }

    /// Lexes the next tokens as template middle or template tail assuming that the starting
    /// '}' has already been consumed.
    pub(super) fn lex_template(
        &mut self,
        start: Position,
        interner: &mut Interner,
    ) -> ParseResult<Token> {
        self.lexer
            .lex_template(start, interner)
            .map_err(Error::from)
    }

    /// Returns whether the underlying lexer is currently in strict mode.
    pub(super) const fn strict_mode(&self) -> bool {
        self.lexer.strict_mode()
    }

    /// Enables or disables strict mode on the underlying lexer.
    pub(super) fn set_strict_mode(&mut self, strict_mode: bool) {
        self.lexer.set_strict_mode(strict_mode);
    }

    /// Fills the peeking buffer with the next token.
    ///
    /// It will not fill two line terminators one after the other.
    fn fill(&mut self, interner: &mut Interner) -> ParseResult<()> {
        debug_assert!(
            self.write_index < PEEK_BUF_SIZE,
            "write index went out of bounds"
        );

        // Index of the most recently written slot, wrapping around the ring buffer.
        let previous_index = self.write_index.checked_sub(1).unwrap_or(PEEK_BUF_SIZE - 1);

        if let Some(ref token) = self.peeked[previous_index] {
            if token.kind() == &TokenKind::LineTerminator {
                // We don't want to have multiple contiguous line terminators in the buffer, since
                // they have no meaning.
                let next = loop {
                    let next = self.lexer.next(interner)?;
                    if let Some(ref token) = next {
                        if token.kind() != &TokenKind::LineTerminator {
                            break next;
                        }
                    } else {
                        // End of stream reached while skipping terminators.
                        break None;
                    }
                };

                self.peeked[self.write_index] = next;
            } else {
                self.peeked[self.write_index] = self.lexer.next(interner)?;
            }
        } else {
            self.peeked[self.write_index] = self.lexer.next(interner)?;
        }
        self.write_index = (self.write_index + 1) % PEEK_BUF_SIZE;

        debug_assert_ne!(
            self.read_index, self.write_index,
            "we reached the read index with the write index"
        );
        debug_assert!(
            self.read_index < PEEK_BUF_SIZE,
            "read index went out of bounds"
        );

        Ok(())
    }

    /// Moves the cursor to the next token and returns the token.
    ///
    /// If `skip_line_terminators` is true then line terminators will be discarded.
    ///
    /// This follows iterator semantics in that a `peek(0, false)` followed by a `next(false)` will
    /// return the same value. Note that because a `peek(n, false)` may return a line terminator a
    /// subsequent `next(true)` may not return the same value.
    pub(super) fn next(
        &mut self,
        skip_line_terminators: bool,
        interner: &mut Interner,
    ) -> ParseResult<Option<Token>> {
        // Buffer empty: lex one token ahead before reading.
        if self.read_index == self.write_index {
            self.fill(interner)?;
        }

        if let Some(ref token) = self.peeked[self.read_index] {
            if skip_line_terminators && token.kind() == &TokenKind::LineTerminator {
                // We only store 1 contiguous line terminator, so if the one at `self.read_index`
                // was a line terminator, we know that the next won't be one.
                self.read_index = (self.read_index + 1) % PEEK_BUF_SIZE;
                if self.read_index == self.write_index {
                    self.fill(interner)?;
                }
            }
            let tok = self.peeked[self.read_index].take();
            self.read_index = (self.read_index + 1) % PEEK_BUF_SIZE;

            Ok(tok)
        } else {
            // We do not update the read index, since we should always return `None` from now on.
            Ok(None)
        }
    }

    /// Peeks the `n`th token after the next token.
    ///
    /// **Note:** `n` must be in the range `[0, 3]`.
    /// i.e. if there are tokens `A`, `B`, `C`, `D`, `E` and `peek(0, false)` returns `A` then:
    /// - `peek(1, false) == peek(1, true) == B`.
    /// - `peek(2, false)` will return `C`.
    /// where `A`, `B`, `C`, `D` and `E` are tokens but not line terminators.
    ///
    /// If `skip_line_terminators` is `true` then line terminators will be discarded.
    /// i.e. If there are tokens `A`, `\n`, `B` and `peek(0, false)` is `A` then the following
    /// will hold:
    /// - `peek(0, true) == A`
    /// - `peek(0, false) == A`
    /// - `peek(1, true) == B`
    /// - `peek(1, false) == \n`
    /// - `peek(2, true) == None` (End of stream)
    /// - `peek(2, false) == B`
    pub(super) fn peek(
        &mut self,
        skip_n: usize,
        skip_line_terminators: bool,
        interner: &mut Interner,
    ) -> ParseResult<Option<&Token>> {
        assert!(
            skip_n <= MAX_PEEK_SKIP,
            "you cannot skip more than {} elements",
            MAX_PEEK_SKIP
        );

        // Walk a local copy of the read index so the cursor itself does not advance.
        let mut read_index = self.read_index;
        let mut count = 0;
        let res_token = loop {
            if read_index == self.write_index {
                self.fill(interner)?;
            }

            if let Some(ref token) = self.peeked[read_index] {
                if skip_line_terminators && token.kind() == &TokenKind::LineTerminator {
                    read_index = (read_index + 1) % PEEK_BUF_SIZE;
                    // We only store 1 contiguous line terminator, so if the one at `self.read_index`
                    // was a line terminator, we know that the next won't be one.
                    if read_index == self.write_index {
                        self.fill(interner)?;
                    }
                }
                if count == skip_n {
                    break self.peeked[read_index].as_ref();
                }
            } else {
                // End of stream: every later peek is also `None`.
                break None;
            }
            read_index = (read_index + 1) % PEEK_BUF_SIZE;
            count += 1;
        };

        Ok(res_token)
    }
}
|
||||
289
javascript-engine/external/boa/boa_parser/src/parser/cursor/buffered_lexer/tests.rs
vendored
Normal file
289
javascript-engine/external/boa/boa_parser/src/parser/cursor/buffered_lexer/tests.rs
vendored
Normal file
@@ -0,0 +1,289 @@
|
||||
use crate::{
|
||||
lexer::{Token, TokenKind},
|
||||
parser::cursor::BufferedLexer,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
#[test]
fn peek_skip_accending() {
    let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
    let interner = &mut Interner::default();

    // Peek ascending then back down again; peeking must never advance the cursor,
    // so each skip distance keeps returning the same identifier.
    let steps: [(usize, &str, &[u16]); 6] = [
        (0, "a", utf16!("a")),
        (1, "b", utf16!("b")),
        (2, "c", utf16!("c")),
        (2, "c", utf16!("c")),
        (1, "b", utf16!("b")),
        (0, "a", utf16!("a")),
    ];
    for (skip, s, u) in steps {
        let expected = TokenKind::identifier(interner.get_or_intern_static(s, u));
        let got = cur
            .peek(skip, false, interner)
            .unwrap()
            .expect("Some value expected")
            .kind()
            .clone();
        assert_eq!(got, expected);
    }
}
|
||||
|
||||
#[test]
fn peek_skip_next() {
    let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
    let interner = &mut Interner::default();

    // Peek the first three tokens without consuming anything.
    let peeks: [(usize, &str, &[u16]); 3] =
        [(0, "a", utf16!("a")), (1, "b", utf16!("b")), (2, "c", utf16!("c"))];
    for (skip, s, u) in peeks {
        let expected = TokenKind::identifier(interner.get_or_intern_static(s, u));
        let got = cur
            .peek(skip, false, interner)
            .unwrap()
            .expect("Some value expected")
            .kind()
            .clone();
        assert_eq!(got, expected);
    }

    // Consuming restarts from the front: `a` through `e`.
    let nexts: [(&str, &[u16]); 5] = [
        ("a", utf16!("a")),
        ("b", utf16!("b")),
        ("c", utf16!("c")),
        ("d", utf16!("d")),
        ("e", utf16!("e")),
    ];
    for (s, u) in nexts {
        let expected = TokenKind::identifier(interner.get_or_intern_static(s, u));
        let got = cur
            .next(false, interner)
            .unwrap()
            .expect("Some value expected")
            .kind()
            .clone();
        assert_eq!(got, expected);
    }

    // Peeking again starts after the consumed prefix: `f`, `g`, `h`.
    let tail_peeks: [(usize, &str, &[u16]); 3] =
        [(0, "f", utf16!("f")), (1, "g", utf16!("g")), (2, "h", utf16!("h"))];
    for (skip, s, u) in tail_peeks {
        let expected = TokenKind::identifier(interner.get_or_intern_static(s, u));
        let got = cur
            .peek(skip, false, interner)
            .unwrap()
            .expect("Some value expected")
            .kind()
            .clone();
        assert_eq!(got, expected);
    }
}
|
||||
|
||||
#[test]
fn peek_skip_next_alternating() {
    let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
    let interner = &mut Interner::default();

    // Alternate peeks and nexts; `Some(skip)` means `peek(skip, ..)`,
    // `None` means `next(..)`. Each step records the expected identifier.
    let steps: [(Option<usize>, &str, &[u16]); 7] = [
        (Some(0), "a", utf16!("a")),
        (None, "a", utf16!("a")),
        (Some(1), "c", utf16!("c")),
        (None, "b", utf16!("b")),
        (Some(1), "d", utf16!("d")),
        (None, "c", utf16!("c")),
        (Some(2), "f", utf16!("f")),
    ];
    for (op, s, u) in steps {
        let expected = TokenKind::identifier(interner.get_or_intern_static(s, u));
        let got = match op {
            Some(skip) => cur
                .peek(skip, false, interner)
                .unwrap()
                .expect("Some value expected")
                .kind()
                .clone(),
            None => cur
                .next(false, interner)
                .unwrap()
                .expect("Some value expected")
                .kind()
                .clone(),
        };
        assert_eq!(got, expected);
    }
}
|
||||
|
||||
#[test]
fn peek_next_till_end() {
    let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
    let interner = &mut Interner::default();

    // Walk the whole stream; every `peek(0, ..)` must equal the `next(..)`
    // that immediately follows it, including the final `None`.
    loop {
        let upcoming = cur.peek(0, false, interner).unwrap().cloned();
        let consumed = cur.next(false, interner).unwrap();

        assert_eq!(upcoming, consumed);

        if upcoming.is_none() {
            break;
        }
    }
}
|
||||
|
||||
#[test]
fn peek_skip_next_till_end() {
    let mut cur = BufferedLexer::from(&b"a b c d e f g h i"[..]);
    let interner = &mut Interner::default();

    // One slot per allowed skip distance, all starting out empty.
    let mut peeked: [Option<Token>; super::MAX_PEEK_SKIP + 1] = Default::default();

    loop {
        // Snapshot the next window of tokens via peeking...
        for (i, slot) in peeked.iter_mut().enumerate() {
            *slot = cur.peek(i, false, interner).unwrap().cloned();
        }

        // ...then consume the same window and check it matches.
        for expected in &peeked {
            assert_eq!(&cur.next(false, interner).unwrap(), expected);
        }

        if peeked[super::MAX_PEEK_SKIP - 1].is_none() {
            break;
        }
    }
}
|
||||
|
||||
// Checks that `peek` with `skip_line_terminators == true` transparently skips
// `\n` tokens, while `skip_line_terminators == false` still exposes them.
#[test]
fn skip_peeked_terminators() {
    let mut cur = BufferedLexer::from(&b"A \n B"[..]);
    let interner = &mut Interner::default();

    // `A` is first either way; no terminator precedes it.
    assert_eq!(
        *cur.peek(0, false, interner)
            .unwrap()
            .expect("Some value expected")
            .kind(),
        TokenKind::identifier(interner.get_or_intern_static("A", utf16!("A")))
    );
    assert_eq!(
        *cur.peek(0, true, interner)
            .unwrap()
            .expect("Some value expected")
            .kind(),
        TokenKind::identifier(interner.get_or_intern_static("A", utf16!("A")))
    );

    // At skip 1 the raw stream shows the terminator; skipping yields `B`.
    assert_eq!(
        *cur.peek(1, false, interner)
            .unwrap()
            .expect("Some value expected")
            .kind(),
        TokenKind::LineTerminator,
    );
    assert_eq!(
        *cur.peek(1, true, interner)
            .unwrap()
            .expect("Some value expected")
            .kind(),
        TokenKind::identifier(interner.get_or_intern_static("B", utf16!("B"))) // This value is after the line terminator
    );

    assert_eq!(
        *cur.peek(2, false, interner)
            .unwrap()
            .expect("Some value expected")
            .kind(),
        TokenKind::identifier(interner.get_or_intern_static("B", utf16!("B")))
    );
    // End of stream
    assert!(cur.peek(2, true, interner).unwrap().is_none());
}
|
||||
|
||||
// Regression test for boa-dev/boa issue #1768: peeking past the end of a
// stream consisting only of line terminators and parentheses must return
// `None` instead of panicking or looping.
#[test]
fn issue_1768() {
    let mut cur = BufferedLexer::from(&b"\n(\nx\n)\n"[..]);
    let interner = &mut Interner::default();

    assert!(cur.peek(3, true, interner).unwrap().is_none());
}
|
||||
297
javascript-engine/external/boa/boa_parser/src/parser/cursor/mod.rs
vendored
Normal file
297
javascript-engine/external/boa/boa_parser/src/parser/cursor/mod.rs
vendored
Normal file
@@ -0,0 +1,297 @@
|
||||
//! Cursor implementation for the parser.
|
||||
mod buffered_lexer;
|
||||
|
||||
use crate::{
|
||||
lexer::{InputElement, Lexer, Token, TokenKind},
|
||||
parser::{OrAbrupt, ParseResult},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{Position, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use buffered_lexer::BufferedLexer;
|
||||
use std::io::Read;
|
||||
|
||||
/// The result of a peek for a semicolon.
#[derive(Debug)]
pub(super) enum SemicolonResult<'s> {
    /// A semicolon (or a token that permits automatic semicolon insertion) was
    /// found; `None` means the token stream ended, which also counts as found.
    Found(Option<&'s Token>),
    /// No semicolon was found; carries the offending token.
    NotFound(&'s Token),
}
|
||||
|
||||
/// Token cursor.
///
/// This internal structure gives basic testable operations to the parser.
#[derive(Debug)]
pub(super) struct Cursor<R> {
    // Buffered token source; supports peeking ahead without consuming.
    buffered_lexer: BufferedLexer<R>,

    /// Tracks the number of nested private environments that the cursor is in.
    private_environment_nested_index: usize,

    /// Tracks the number of private environments on the root level of the code that is parsed.
    private_environment_root_index: usize,

    /// Tracks if the cursor is in a arrow function declaration.
    arrow: bool,

    /// Indicate if the cursor is used in `JSON.parse`.
    json_parse: bool,
}
|
||||
|
||||
impl<R> Cursor<R>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
/// Creates a new cursor with the given reader.
|
||||
pub(super) fn new(reader: R) -> Self {
|
||||
Self {
|
||||
buffered_lexer: Lexer::new(reader).into(),
|
||||
private_environment_nested_index: 0,
|
||||
private_environment_root_index: 0,
|
||||
arrow: false,
|
||||
json_parse: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn set_goal(&mut self, elm: InputElement) {
|
||||
self.buffered_lexer.set_goal(elm);
|
||||
}
|
||||
|
||||
pub(super) fn lex_regex(
|
||||
&mut self,
|
||||
start: Position,
|
||||
interner: &mut Interner,
|
||||
) -> ParseResult<Token> {
|
||||
self.buffered_lexer.lex_regex(start, interner)
|
||||
}
|
||||
|
||||
pub(super) fn lex_template(
|
||||
&mut self,
|
||||
start: Position,
|
||||
interner: &mut Interner,
|
||||
) -> ParseResult<Token> {
|
||||
self.buffered_lexer.lex_template(start, interner)
|
||||
}
|
||||
|
||||
/// Advances the cursor and returns the next token.
|
||||
pub(super) fn next(&mut self, interner: &mut Interner) -> ParseResult<Option<Token>> {
|
||||
self.buffered_lexer.next(true, interner)
|
||||
}
|
||||
|
||||
/// Advances the cursor without returning the next token.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function will panic if there is no further token in the cursor.
|
||||
#[track_caller]
|
||||
pub(super) fn advance(&mut self, interner: &mut Interner) {
|
||||
self.next(interner)
|
||||
.expect("tried to advance cursor, but the buffer was empty");
|
||||
}
|
||||
|
||||
/// Peeks a future token, without consuming it or advancing the cursor.
|
||||
///
|
||||
/// You can skip some tokens with the `skip_n` option.
|
||||
pub(super) fn peek(
|
||||
&mut self,
|
||||
skip_n: usize,
|
||||
interner: &mut Interner,
|
||||
) -> ParseResult<Option<&Token>> {
|
||||
self.buffered_lexer.peek(skip_n, true, interner)
|
||||
}
|
||||
|
||||
/// Gets the current strict mode for the cursor.
|
||||
pub(super) const fn strict_mode(&self) -> bool {
|
||||
self.buffered_lexer.strict_mode()
|
||||
}
|
||||
|
||||
/// Sets the strict mode to strict or non-strict.
|
||||
pub(super) fn set_strict_mode(&mut self, strict_mode: bool) {
|
||||
self.buffered_lexer.set_strict_mode(strict_mode);
|
||||
}
|
||||
|
||||
/// Returns if the cursor is currently in an arrow function declaration.
|
||||
pub(super) const fn arrow(&self) -> bool {
|
||||
self.arrow
|
||||
}
|
||||
|
||||
/// Set if the cursor is currently in a arrow function declaration.
|
||||
pub(super) fn set_arrow(&mut self, arrow: bool) {
|
||||
self.arrow = arrow;
|
||||
}
|
||||
|
||||
/// Returns if the cursor is currently used in `JSON.parse`.
|
||||
pub(super) const fn json_parse(&self) -> bool {
|
||||
self.json_parse
|
||||
}
|
||||
|
||||
/// Set if the cursor is currently used in `JSON.parse`.
|
||||
pub(super) fn set_json_parse(&mut self, json_parse: bool) {
|
||||
self.json_parse = json_parse;
|
||||
}
|
||||
|
||||
/// Push a new private environment.
|
||||
#[inline]
|
||||
pub(super) fn push_private_environment(&mut self) {
|
||||
if !self.in_class() {
|
||||
self.private_environment_root_index += 1;
|
||||
}
|
||||
|
||||
self.private_environment_nested_index += 1;
|
||||
}
|
||||
|
||||
/// Pop a private environment.
|
||||
#[inline]
|
||||
pub(super) fn pop_private_environment(&mut self) {
|
||||
self.private_environment_nested_index -= 1;
|
||||
}
|
||||
|
||||
/// Returns the current private environment root index.
|
||||
#[inline]
|
||||
pub(super) const fn private_environment_root_index(&self) -> usize {
|
||||
self.private_environment_root_index
|
||||
}
|
||||
|
||||
/// Returns if the cursor is in a class.
|
||||
#[inline]
|
||||
pub(super) const fn in_class(&self) -> bool {
|
||||
self.private_environment_nested_index != 0
|
||||
}
|
||||
|
||||
/// Returns an error if the next token is not of kind `kind`.
|
||||
pub(super) fn expect<K>(
|
||||
&mut self,
|
||||
kind: K,
|
||||
context: &'static str,
|
||||
interner: &mut Interner,
|
||||
) -> ParseResult<Token>
|
||||
where
|
||||
K: Into<TokenKind>,
|
||||
{
|
||||
let next_token = self.next(interner).or_abrupt()?;
|
||||
let kind = kind.into();
|
||||
|
||||
if next_token.kind() == &kind {
|
||||
Ok(next_token)
|
||||
} else {
|
||||
Err(Error::expected(
|
||||
[kind.to_string(interner)],
|
||||
next_token.to_string(interner),
|
||||
next_token.span(),
|
||||
context,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// It will peek for the next token, to see if it's a semicolon.
|
||||
///
|
||||
/// It will automatically insert a semicolon if needed, as specified in the [spec][spec].
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-automatic-semicolon-insertion
|
||||
pub(super) fn peek_semicolon(
|
||||
&mut self,
|
||||
interner: &mut Interner,
|
||||
) -> ParseResult<SemicolonResult<'_>> {
|
||||
self.buffered_lexer.peek(0, false, interner)?.map_or(
|
||||
Ok(SemicolonResult::Found(None)),
|
||||
|tk| match tk.kind() {
|
||||
TokenKind::Punctuator(Punctuator::Semicolon | Punctuator::CloseBlock)
|
||||
| TokenKind::LineTerminator => Ok(SemicolonResult::Found(Some(tk))),
|
||||
_ => Ok(SemicolonResult::NotFound(tk)),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Consumes the next token if it is a semicolon, or returns an `Error` if it's not.
///
/// It will automatically insert a semicolon if needed, as specified in the [spec][spec].
///
/// [spec]: https://tc39.es/ecma262/#sec-automatic-semicolon-insertion
pub(super) fn expect_semicolon(
    &mut self,
    context: &'static str,
    interner: &mut Interner,
) -> ParseResult<()> {
    match self.peek_semicolon(interner)? {
        SemicolonResult::Found(Some(tk)) => match *tk.kind() {
            // An explicit `;` or a line terminator is consumed as the
            // statement terminator.
            TokenKind::Punctuator(Punctuator::Semicolon) | TokenKind::LineTerminator => {
                let _next = self.buffered_lexer.next(false, interner)?;
                Ok(())
            }
            // Any other "found" token (a closing `}`) terminates the
            // statement but belongs to the enclosing production, so it is
            // deliberately left in the token stream.
            _ => Ok(()),
        },
        // End of input: nothing to consume, ASI applies.
        SemicolonResult::Found(None) => Ok(()),
        SemicolonResult::NotFound(tk) => Err(Error::expected(
            [";".to_owned()],
            tk.to_string(interner),
            tk.span(),
            context,
        )),
    }
}
|
||||
|
||||
/// It will make sure that the peeked token (skipping n tokens) is not a line terminator.
///
/// It expects that the token stream does not end here.
///
/// This is just syntactic sugar for a `.peek(skip_n)` call followed by a check that the result
/// is not a line terminator or `None`.
pub(super) fn peek_expect_no_lineterminator(
    &mut self,
    skip_n: usize,
    context: &'static str,
    interner: &mut Interner,
) -> ParseResult<&Token> {
    // `or_abrupt` converts an unexpected end of input into `Error::AbruptEnd`.
    let tok = self
        .buffered_lexer
        .peek(skip_n, false, interner)
        .or_abrupt()?;

    if tok.kind() == &TokenKind::LineTerminator {
        Err(Error::unexpected(
            tok.to_string(interner),
            tok.span(),
            context,
        ))
    } else {
        Ok(tok)
    }
}
|
||||
|
||||
/// Check if the peeked token is a line terminator.
|
||||
pub(super) fn peek_is_line_terminator(
|
||||
&mut self,
|
||||
skip_n: usize,
|
||||
interner: &mut Interner,
|
||||
) -> ParseResult<Option<bool>> {
|
||||
self.buffered_lexer
|
||||
.peek(skip_n, false, interner)?
|
||||
.map_or(Ok(None), |t| {
|
||||
Ok(Some(t.kind() == &TokenKind::LineTerminator))
|
||||
})
|
||||
}
|
||||
|
||||
/// Advance the cursor to the next token and retrieve it, only if it's of `kind` type.
|
||||
///
|
||||
/// When the next token is a `kind` token, get the token, otherwise return `None`.
|
||||
///
|
||||
/// No next token also returns None.
|
||||
pub(super) fn next_if<K>(
|
||||
&mut self,
|
||||
kind: K,
|
||||
interner: &mut Interner,
|
||||
) -> ParseResult<Option<Token>>
|
||||
where
|
||||
K: Into<TokenKind>,
|
||||
{
|
||||
Ok(if let Some(token) = self.peek(0, interner)? {
|
||||
if token.kind() == &kind.into() {
|
||||
self.next(interner)?
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
245
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/arrow_function.rs
vendored
Normal file
245
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/arrow_function.rs
vendored
Normal file
@@ -0,0 +1,245 @@
|
||||
//! Arrow function parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-arrow-function-definitions
|
||||
|
||||
use super::AssignmentExpression;
|
||||
use crate::{
|
||||
error::{Error, ErrorContext, ParseResult},
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::BindingIdentifier,
|
||||
function::{FormalParameters, FunctionBody},
|
||||
name_in_lexically_declared_names, AllowAwait, AllowIn, AllowYield, Cursor, OrAbrupt,
|
||||
TokenParser,
|
||||
},
|
||||
};
|
||||
use ast::operations::{bound_names, top_level_lexically_declared_names};
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
declaration::Variable,
|
||||
expression::Identifier,
|
||||
function::{FormalParameter, FormalParameterList},
|
||||
operations::{contains, ContainsSymbol},
|
||||
statement::Return,
|
||||
Expression, Punctuator, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Arrow function parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-ArrowFunction
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct ArrowFunction {
|
||||
name: Option<Identifier>,
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl ArrowFunction {
|
||||
/// Creates a new `ArrowFunction` parser.
|
||||
pub(in crate::parser) fn new<N, I, Y, A>(
|
||||
name: N,
|
||||
allow_in: I,
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ArrowFunction
where
    R: Read,
{
    type Output = ast::function::ArrowFunction;

    /// Parses an arrow function: parameters (parenthesized list or a single
    /// binding identifier), the `=>` token, the concise body, and then the
    /// spec-mandated early-error checks over parameters and body.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ArrowFunction", "Parsing");
        let next_token = cursor.peek(0, interner).or_abrupt()?;

        let (params, params_start_position) = if let TokenKind::Punctuator(Punctuator::OpenParen) =
            &next_token.kind()
        {
            // CoverParenthesizedExpressionAndArrowParameterList
            let params_start_position = cursor
                .expect(Punctuator::OpenParen, "arrow function", interner)?
                .span()
                .end();

            let params = FormalParameters::new(self.allow_yield, self.allow_await)
                .parse(cursor, interner)?;
            cursor.expect(Punctuator::CloseParen, "arrow function", interner)?;
            (params, params_start_position)
        } else {
            // Single-identifier form: `x => ...`; wrap the identifier in a
            // one-element formal parameter list.
            let params_start_position = next_token.span().start();
            let param = BindingIdentifier::new(self.allow_yield, self.allow_await)
                .parse(cursor, interner)
                .context("arrow function")?;
            (
                FormalParameterList::try_from(FormalParameter::new(
                    Variable::from_identifier(param, None),
                    false,
                ))
                .expect("a single binding identifier without init is always a valid param list"),
                params_start_position,
            )
        };

        // `=>` must be on the same line as the parameters (no ASI here).
        cursor.peek_expect_no_lineterminator(0, "arrow function", interner)?;

        cursor.expect(
            TokenKind::Punctuator(Punctuator::Arrow),
            "arrow function",
            interner,
        )?;
        // Save and restore the cursor's arrow flag around the body parse.
        let arrow = cursor.arrow();
        cursor.set_arrow(true);
        let body = ConciseBody::new(self.allow_in).parse(cursor, interner)?;
        cursor.set_arrow(arrow);

        // Early Error: ArrowFormalParameters are UniqueFormalParameters.
        if params.has_duplicates() {
            return Err(Error::lex(LexError::Syntax(
                "Duplicate parameter name not allowed in this context".into(),
                params_start_position,
            )));
        }

        // Early Error: It is a Syntax Error if ArrowParameters Contains YieldExpression is true.
        if contains(&params, ContainsSymbol::YieldExpression) {
            return Err(Error::lex(LexError::Syntax(
                "Yield expression not allowed in this context".into(),
                params_start_position,
            )));
        }

        // Early Error: It is a Syntax Error if ArrowParameters Contains AwaitExpression is true.
        if contains(&params, ContainsSymbol::AwaitExpression) {
            return Err(Error::lex(LexError::Syntax(
                "Await expression not allowed in this context".into(),
                params_start_position,
            )));
        }

        // Early Error: It is a Syntax Error if ConciseBodyContainsUseStrict of ConciseBody is true
        // and IsSimpleParameterList of ArrowParameters is false.
        if body.strict() && !params.is_simple() {
            return Err(Error::lex(LexError::Syntax(
                "Illegal 'use strict' directive in function with non-simple parameter list".into(),
                params_start_position,
            )));
        };

        // It is a Syntax Error if any element of the BoundNames of ArrowParameters
        // also occurs in the LexicallyDeclaredNames of ConciseBody.
        // https://tc39.es/ecma262/#sec-arrow-function-definitions-static-semantics-early-errors
        name_in_lexically_declared_names(
            &bound_names(&params),
            &top_level_lexically_declared_names(&body),
            params_start_position,
        )?;

        Ok(ast::function::ArrowFunction::new(self.name, params, body))
    }
}
|
||||
|
||||
/// <https://tc39.es/ecma262/#prod-ConciseBody>
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct ConciseBody {
|
||||
allow_in: AllowIn,
|
||||
}
|
||||
|
||||
impl ConciseBody {
|
||||
/// Creates a new `ConciseBody` parser.
|
||||
pub(in crate::parser) fn new<I>(allow_in: I) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ConciseBody
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = StatementList;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
match cursor.peek(0, interner).or_abrupt()?.kind() {
|
||||
TokenKind::Punctuator(Punctuator::OpenBlock) => {
|
||||
cursor.advance(interner);
|
||||
let body = FunctionBody::new(false, false).parse(cursor, interner)?;
|
||||
cursor.expect(Punctuator::CloseBlock, "arrow function", interner)?;
|
||||
Ok(body)
|
||||
}
|
||||
_ => Ok(StatementList::from(vec![ast::Statement::Return(
|
||||
Return::new(
|
||||
ExpressionBody::new(self.allow_in, false)
|
||||
.parse(cursor, interner)?
|
||||
.into(),
|
||||
),
|
||||
)
|
||||
.into()])),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <https://tc39.es/ecma262/#prod-ExpressionBody>
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct ExpressionBody {
|
||||
allow_in: AllowIn,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl ExpressionBody {
|
||||
/// Creates a new `ExpressionBody` parser.
|
||||
pub(super) fn new<I, A>(allow_in: I, allow_await: A) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ExpressionBody
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Expression;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
AssignmentExpression::new(None, self.allow_in, false, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
}
|
||||
}
|
||||
200
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/async_arrow_function.rs
vendored
Normal file
200
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/async_arrow_function.rs
vendored
Normal file
@@ -0,0 +1,200 @@
|
||||
//! Async arrow function parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-async-arrow-function-definitions
|
||||
|
||||
use super::arrow_function::ExpressionBody;
|
||||
use crate::{
|
||||
error::{Error, ErrorContext, ParseResult},
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::BindingIdentifier,
|
||||
function::{FormalParameters, FunctionBody},
|
||||
name_in_lexically_declared_names, AllowIn, AllowYield, Cursor, OrAbrupt, TokenParser,
|
||||
},
|
||||
};
|
||||
use ast::{
|
||||
operations::{bound_names, contains, top_level_lexically_declared_names, ContainsSymbol},
|
||||
Keyword,
|
||||
};
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
declaration::Variable,
|
||||
expression::Identifier,
|
||||
function::{FormalParameter, FormalParameterList},
|
||||
statement::Return,
|
||||
Punctuator, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Async arrow function parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-AsyncArrowFunction
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct AsyncArrowFunction {
|
||||
name: Option<Identifier>,
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
}
|
||||
|
||||
impl AsyncArrowFunction {
|
||||
/// Creates a new `AsyncArrowFunction` parser.
|
||||
pub(in crate::parser) fn new<N, I, Y>(name: N, allow_in: I, allow_yield: Y) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for AsyncArrowFunction
where
    R: Read,
{
    type Output = ast::function::AsyncArrowFunction;

    /// Parses an async arrow function: the `async` keyword, parameters
    /// (parenthesized list or single binding identifier), `=>`, the concise
    /// body, and the spec-mandated early-error checks.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("AsyncArrowFunction", "Parsing");

        cursor.expect((Keyword::Async, false), "async arrow function", interner)?;
        // No line terminator allowed between `async` and the parameters.
        cursor.peek_expect_no_lineterminator(0, "async arrow function", interner)?;

        let next_token = cursor.peek(0, interner).or_abrupt()?;
        let (params, params_start_position) = if let TokenKind::Punctuator(Punctuator::OpenParen) =
            &next_token.kind()
        {
            let params_start_position = cursor
                .expect(Punctuator::OpenParen, "async arrow function", interner)?
                .span()
                .end();

            // In an async arrow head, `yield` is disallowed and `await` is allowed.
            let params = FormalParameters::new(false, true).parse(cursor, interner)?;
            cursor.expect(Punctuator::CloseParen, "async arrow function", interner)?;
            (params, params_start_position)
        } else {
            // Single-identifier form: `async x => ...`.
            let params_start_position = next_token.span().start();
            let param = BindingIdentifier::new(self.allow_yield, true)
                .parse(cursor, interner)
                .context("async arrow function")?;
            (
                FormalParameterList::try_from(FormalParameter::new(
                    Variable::from_identifier(param, None),
                    false,
                ))
                .expect("a single binding identifier without init is always a valid param list"),
                params_start_position,
            )
        };

        // `=>` must follow the parameters on the same line.
        cursor.peek_expect_no_lineterminator(0, "async arrow function", interner)?;
        cursor.expect(Punctuator::Arrow, "async arrow function", interner)?;

        let body = AsyncConciseBody::new(self.allow_in).parse(cursor, interner)?;

        // Early Error: ArrowFormalParameters are UniqueFormalParameters.
        if params.has_duplicates() {
            return Err(Error::lex(LexError::Syntax(
                "Duplicate parameter name not allowed in this context".into(),
                params_start_position,
            )));
        }

        // Early Error: It is a Syntax Error if CoverCallExpressionAndAsyncArrowHead Contains YieldExpression is true.
        if contains(&params, ContainsSymbol::YieldExpression) {
            return Err(Error::lex(LexError::Syntax(
                "Yield expression not allowed in this context".into(),
                params_start_position,
            )));
        }

        // Early Error: It is a Syntax Error if CoverCallExpressionAndAsyncArrowHead Contains AwaitExpression is true.
        if contains(&params, ContainsSymbol::AwaitExpression) {
            return Err(Error::lex(LexError::Syntax(
                "Await expression not allowed in this context".into(),
                params_start_position,
            )));
        }

        // Early Error: It is a Syntax Error if AsyncConciseBodyContainsUseStrict of AsyncConciseBody is true and
        // IsSimpleParameterList of CoverCallExpressionAndAsyncArrowHead is false.
        if body.strict() && !params.is_simple() {
            return Err(Error::lex(LexError::Syntax(
                "Illegal 'use strict' directive in function with non-simple parameter list".into(),
                params_start_position,
            )));
        }

        // Early Error: It is a Syntax Error if any element of the BoundNames of CoverCallExpressionAndAsyncArrowHead
        // also occurs in the LexicallyDeclaredNames of AsyncConciseBody.
        name_in_lexically_declared_names(
            &bound_names(&params),
            &top_level_lexically_declared_names(&body),
            params_start_position,
        )?;

        Ok(ast::function::AsyncArrowFunction::new(
            self.name, params, body,
        ))
    }
}
|
||||
|
||||
/// <https://tc39.es/ecma262/#prod-AsyncConciseBody>
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct AsyncConciseBody {
|
||||
allow_in: AllowIn,
|
||||
}
|
||||
|
||||
impl AsyncConciseBody {
|
||||
/// Creates a new `AsyncConciseBody` parser.
|
||||
pub(in crate::parser) fn new<I>(allow_in: I) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for AsyncConciseBody
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = StatementList;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
match cursor.peek(0, interner).or_abrupt()?.kind() {
|
||||
TokenKind::Punctuator(Punctuator::OpenBlock) => {
|
||||
cursor.advance(interner);
|
||||
let body = FunctionBody::new(false, true).parse(cursor, interner)?;
|
||||
cursor.expect(Punctuator::CloseBlock, "async arrow function", interner)?;
|
||||
Ok(body)
|
||||
}
|
||||
_ => Ok(StatementList::from(vec![ast::Statement::Return(
|
||||
Return::new(
|
||||
ExpressionBody::new(self.allow_in, true)
|
||||
.parse(cursor, interner)?
|
||||
.into(),
|
||||
),
|
||||
)
|
||||
.into()])),
|
||||
}
|
||||
}
|
||||
}
|
||||
101
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/conditional.rs
vendored
Normal file
101
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/conditional.rs
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
//! Conditional operator parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Conditional_Operator
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-conditional-operator
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::{AssignmentExpression, ShortCircuitExpression},
|
||||
AllowAwait, AllowIn, AllowYield, Cursor, ParseResult, TokenParser,
|
||||
},
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::{operator::Conditional, Identifier},
|
||||
Expression, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Conditional expression parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Conditional_Operator
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-ConditionalExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser::expression) struct ConditionalExpression {
|
||||
name: Option<Identifier>,
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl ConditionalExpression {
|
||||
/// Creates a new `ConditionalExpression` parser.
|
||||
pub(in crate::parser::expression) fn new<N, I, Y, A>(
|
||||
name: N,
|
||||
allow_in: I,
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ConditionalExpression
where
    R: Read,
{
    type Output = Expression;

    /// Parses a short-circuit expression, then — if a `?` follows — the
    /// `then : else` clauses of a conditional (ternary) expression.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ConditionalExpression", "Parsing");
        let lhs = ShortCircuitExpression::new(
            self.name,
            self.allow_in,
            self.allow_yield,
            self.allow_await,
        )
        .parse(cursor, interner)?;

        if let Some(tok) = cursor.peek(0, interner)? {
            if tok.kind() == &TokenKind::Punctuator(Punctuator::Question) {
                // Consume the `?` and parse both branches.
                cursor.advance(interner);
                // The then-branch always allows `in` (`[+In]` in the spec
                // grammar), hence the `true` literal here.
                let then_clause =
                    AssignmentExpression::new(None, true, self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                cursor.expect(Punctuator::Colon, "conditional expression", interner)?;

                // The else-branch inherits the surrounding `in` restriction.
                let else_clause = AssignmentExpression::new(
                    None,
                    self.allow_in,
                    self.allow_yield,
                    self.allow_await,
                )
                .parse(cursor, interner)?;
                return Ok(Conditional::new(lhs, then_clause, else_clause).into());
            }
        }

        // No `?` follows: the short-circuit expression stands alone.
        Ok(lhs)
    }
}
|
||||
103
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/exponentiation.rs
vendored
Normal file
103
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/exponentiation.rs
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
//! Exponentiation operator parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Arithmetic_Operators#Exponentiation
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-exp-operator
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::{unary::UnaryExpression, update::UpdateExpression},
|
||||
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::{
|
||||
operator::{binary::ArithmeticOp, Binary},
|
||||
Identifier,
|
||||
},
|
||||
Expression, Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses an exponentiation expression.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Arithmetic_Operators#Exponentiation
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-ExponentiationExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser::expression) struct ExponentiationExpression {
|
||||
name: Option<Identifier>,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl ExponentiationExpression {
|
||||
/// Creates a new `ExponentiationExpression` parser.
|
||||
pub(in crate::parser::expression) fn new<N, Y, A>(
|
||||
name: N,
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ExponentiationExpression
where
    R: Read,
{
    type Output = Expression;

    /// Parses an exponentiation (`**`) expression. A unary operator cannot be
    /// the base of `**`, so unary-prefixed input is routed to
    /// `UnaryExpression`; otherwise the right operand is parsed by recursing
    /// into `self.parse`, which makes `**` right-associative.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ExponentiationExpression", "Parsing");

        let next = cursor.peek(0, interner).or_abrupt()?;
        match next.kind() {
            // A unary operator starts a UnaryExpression, which cannot be the
            // left operand of `**`.
            TokenKind::Keyword((Keyword::Delete | Keyword::Void | Keyword::TypeOf, _))
            | TokenKind::Punctuator(
                Punctuator::Add | Punctuator::Sub | Punctuator::Not | Punctuator::Neg,
            ) => {
                return UnaryExpression::new(self.name, self.allow_yield, self.allow_await)
                    .parse(cursor, interner);
            }
            // `await` only acts as a unary operator when awaiting is allowed.
            TokenKind::Keyword((Keyword::Await, _)) if self.allow_await.0 => {
                return UnaryExpression::new(self.name, self.allow_yield, self.allow_await)
                    .parse(cursor, interner);
            }
            _ => {}
        }

        let lhs = UpdateExpression::new(self.name, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;
        if let Some(tok) = cursor.peek(0, interner)? {
            if let TokenKind::Punctuator(Punctuator::Exp) = tok.kind() {
                cursor.advance(interner);
                // Recursing into `self.parse` for the right operand yields
                // right associativity: `a ** b ** c` == `a ** (b ** c)`.
                return Ok(Binary::new(
                    ArithmeticOp::Exp.into(),
                    lhs,
                    self.parse(cursor, interner)?,
                )
                .into());
            }
        }
        Ok(lhs)
    }
}
|
||||
299
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/mod.rs
vendored
Normal file
299
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/mod.rs
vendored
Normal file
@@ -0,0 +1,299 @@
|
||||
//! Assignment operator parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Assignment_Operators#Assignment
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-assignment-operators
|
||||
|
||||
mod arrow_function;
|
||||
mod async_arrow_function;
|
||||
mod conditional;
|
||||
mod exponentiation;
|
||||
mod r#yield;
|
||||
|
||||
use super::check_strict_arguments_or_eval;
|
||||
use crate::{
|
||||
lexer::{Error as LexError, InputElement, TokenKind},
|
||||
parser::{
|
||||
expression::assignment::{
|
||||
arrow_function::{ArrowFunction, ConciseBody},
|
||||
async_arrow_function::AsyncArrowFunction,
|
||||
conditional::ConditionalExpression,
|
||||
r#yield::YieldExpression,
|
||||
},
|
||||
name_in_lexically_declared_names, AllowAwait, AllowIn, AllowYield, Cursor, OrAbrupt,
|
||||
ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::{
|
||||
operator::assign::{Assign, AssignOp, AssignTarget},
|
||||
Identifier,
|
||||
},
|
||||
operations::{bound_names, contains, top_level_lexically_declared_names, ContainsSymbol},
|
||||
Expression, Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
pub(super) use exponentiation::ExponentiationExpression;
|
||||
|
||||
/// Assignment expression parsing.
|
||||
///
|
||||
/// This can be one of the following:
|
||||
///
|
||||
/// - [`ConditionalExpression`](../conditional_operator/struct.ConditionalExpression.html)
|
||||
/// - `YieldExpression`
|
||||
/// - [`ArrowFunction`](../../function/arrow_function/struct.ArrowFunction.html)
|
||||
/// - `AsyncArrowFunction`
|
||||
/// - [`LeftHandSideExpression`][lhs] `=` `AssignmentExpression`
|
||||
/// - [`LeftHandSideExpression`][lhs] `AssignmentOperator` `AssignmentExpression`
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Assignment_Operators#Assignment
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-AssignmentExpression
|
||||
/// [lhs]: ../lhs_expression/struct.LeftHandSideExpression.html
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct AssignmentExpression {
|
||||
name: Option<Identifier>,
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl AssignmentExpression {
|
||||
/// Creates a new `AssignmentExpression` parser.
|
||||
pub(in crate::parser) fn new<N, I, Y, A>(
|
||||
name: N,
|
||||
allow_in: I,
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for AssignmentExpression
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Expression;
|
||||
|
||||
fn parse(mut self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Expression> {
|
||||
let _timer = Profiler::global().start_event("AssignmentExpression", "Parsing");
|
||||
cursor.set_goal(InputElement::RegExp);
|
||||
|
||||
match cursor.peek(0, interner).or_abrupt()?.kind() {
|
||||
// [+Yield]YieldExpression[?In, ?Await]
|
||||
TokenKind::Keyword((Keyword::Yield, _)) if self.allow_yield.0 => {
|
||||
return YieldExpression::new(self.allow_in, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
}
|
||||
// ArrowFunction[?In, ?Yield, ?Await] -> ArrowParameters[?Yield, ?Await] -> BindingIdentifier[?Yield, ?Await]
|
||||
TokenKind::Identifier(_) | TokenKind::Keyword((Keyword::Yield | Keyword::Await, _)) => {
|
||||
cursor.set_goal(InputElement::Div);
|
||||
|
||||
// Because we already peeked the identifier token, there may be a line terminator before the identifier token.
|
||||
// In that case we have to skip an additional token on the next peek.
|
||||
let skip_n = if cursor.peek_is_line_terminator(0, interner).or_abrupt()? {
|
||||
2
|
||||
} else {
|
||||
1
|
||||
};
|
||||
if let Ok(tok) =
|
||||
cursor.peek_expect_no_lineterminator(skip_n, "assignment expression", interner)
|
||||
{
|
||||
if tok.kind() == &TokenKind::Punctuator(Punctuator::Arrow) {
|
||||
return ArrowFunction::new(
|
||||
self.name,
|
||||
self.allow_in,
|
||||
self.allow_yield,
|
||||
self.allow_await,
|
||||
)
|
||||
.parse(cursor, interner)
|
||||
.map(Expression::ArrowFunction);
|
||||
}
|
||||
}
|
||||
}
|
||||
// AsyncArrowFunction[?In, ?Yield, ?Await]
|
||||
TokenKind::Keyword((Keyword::Async, _)) => {
|
||||
let skip_n = if cursor.peek_is_line_terminator(0, interner).or_abrupt()? {
|
||||
2
|
||||
} else {
|
||||
1
|
||||
};
|
||||
|
||||
if !cursor
|
||||
.peek_is_line_terminator(skip_n, interner)
|
||||
.or_abrupt()?
|
||||
&& matches!(
|
||||
cursor.peek(1, interner).or_abrupt()?.kind(),
|
||||
TokenKind::Identifier(_)
|
||||
| TokenKind::Keyword((Keyword::Yield | Keyword::Await, _))
|
||||
| TokenKind::Punctuator(Punctuator::OpenParen)
|
||||
)
|
||||
{
|
||||
return Ok(
|
||||
AsyncArrowFunction::new(self.name, self.allow_in, self.allow_yield)
|
||||
.parse(cursor, interner)?
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
cursor.set_goal(InputElement::Div);
|
||||
|
||||
let position = cursor.peek(0, interner).or_abrupt()?.span().start();
|
||||
let mut lhs = ConditionalExpression::new(
|
||||
self.name,
|
||||
self.allow_in,
|
||||
self.allow_yield,
|
||||
self.allow_await,
|
||||
)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
// If the left hand side is a parameter list, we must parse an arrow function.
|
||||
if let Expression::FormalParameterList(parameters) = lhs {
|
||||
cursor.peek_expect_no_lineterminator(0, "arrow function", interner)?;
|
||||
|
||||
cursor.expect(
|
||||
TokenKind::Punctuator(Punctuator::Arrow),
|
||||
"arrow function",
|
||||
interner,
|
||||
)?;
|
||||
let arrow = cursor.arrow();
|
||||
cursor.set_arrow(true);
|
||||
let body = ConciseBody::new(self.allow_in).parse(cursor, interner)?;
|
||||
cursor.set_arrow(arrow);
|
||||
|
||||
// Early Error: ArrowFormalParameters are UniqueFormalParameters.
|
||||
if parameters.has_duplicates() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Duplicate parameter name not allowed in this context".into(),
|
||||
position,
|
||||
)));
|
||||
}
|
||||
|
||||
// Early Error: It is a Syntax Error if ArrowParameters Contains YieldExpression is true.
|
||||
if contains(¶meters, ContainsSymbol::YieldExpression) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Yield expression not allowed in this context".into(),
|
||||
position,
|
||||
)));
|
||||
}
|
||||
|
||||
// Early Error: It is a Syntax Error if ArrowParameters Contains AwaitExpression is true.
|
||||
if contains(¶meters, ContainsSymbol::AwaitExpression) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Await expression not allowed in this context".into(),
|
||||
position,
|
||||
)));
|
||||
}
|
||||
|
||||
// Early Error: It is a Syntax Error if ConciseBodyContainsUseStrict of ConciseBody is true
|
||||
// and IsSimpleParameterList of ArrowParameters is false.
|
||||
if body.strict() && !parameters.is_simple() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Illegal 'use strict' directive in function with non-simple parameter list"
|
||||
.into(),
|
||||
position,
|
||||
)));
|
||||
}
|
||||
|
||||
// It is a Syntax Error if any element of the BoundNames of ArrowParameters
|
||||
// also occurs in the LexicallyDeclaredNames of ConciseBody.
|
||||
// https://tc39.es/ecma262/#sec-arrow-function-definitions-static-semantics-early-errors
|
||||
name_in_lexically_declared_names(
|
||||
&bound_names(¶meters),
|
||||
&top_level_lexically_declared_names(&body),
|
||||
position,
|
||||
)?;
|
||||
|
||||
return Ok(boa_ast::function::ArrowFunction::new(self.name, parameters, body).into());
|
||||
}
|
||||
|
||||
// Review if we are trying to assign to an invalid left hand side expression.
|
||||
if let Some(tok) = cursor.peek(0, interner)?.cloned() {
|
||||
match tok.kind() {
|
||||
TokenKind::Punctuator(Punctuator::Assign) => {
|
||||
if cursor.strict_mode() {
|
||||
if let Expression::Identifier(ident) = lhs {
|
||||
check_strict_arguments_or_eval(ident, position)?;
|
||||
}
|
||||
}
|
||||
|
||||
cursor.advance(interner);
|
||||
cursor.set_goal(InputElement::RegExp);
|
||||
|
||||
if let Some(target) =
|
||||
AssignTarget::from_expression(&lhs, cursor.strict_mode(), true)
|
||||
{
|
||||
if let AssignTarget::Identifier(ident) = target {
|
||||
self.name = Some(ident);
|
||||
}
|
||||
let expr = self.parse(cursor, interner)?;
|
||||
lhs = Assign::new(AssignOp::Assign, target, expr).into();
|
||||
} else {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Invalid left-hand side in assignment".into(),
|
||||
tok.span().start(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
TokenKind::Punctuator(p) if p.as_assign_op().is_some() => {
|
||||
if cursor.strict_mode() {
|
||||
if let Expression::Identifier(ident) = lhs {
|
||||
check_strict_arguments_or_eval(ident, position)?;
|
||||
}
|
||||
}
|
||||
|
||||
cursor.advance(interner);
|
||||
if let Some(target) =
|
||||
AssignTarget::from_expression(&lhs, cursor.strict_mode(), false)
|
||||
{
|
||||
let assignop = p.as_assign_op().expect("assignop disappeared");
|
||||
if assignop == AssignOp::BoolAnd
|
||||
|| assignop == AssignOp::BoolOr
|
||||
|| assignop == AssignOp::Coalesce
|
||||
{
|
||||
if let AssignTarget::Identifier(ident) = target {
|
||||
self.name = Some(ident);
|
||||
}
|
||||
}
|
||||
|
||||
let rhs = self.parse(cursor, interner)?;
|
||||
lhs = Assign::new(assignop, target, rhs).into();
|
||||
} else {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Invalid left-hand side in assignment".into(),
|
||||
tok.span().start(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(lhs)
|
||||
}
|
||||
}
|
||||
118
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/yield.rs
vendored
Normal file
118
javascript-engine/external/boa/boa_parser/src/parser/expression/assignment/yield.rs
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
//! `YieldExpression` parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/yield
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-YieldExpression
|
||||
|
||||
use super::AssignmentExpression;
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{AllowAwait, AllowIn, Cursor, OrAbrupt, ParseResult, TokenParser},
|
||||
};
|
||||
use boa_ast::{expression::Yield, Expression, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// `YieldExpression` parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/yield
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-YieldExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct YieldExpression {
|
||||
allow_in: AllowIn,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl YieldExpression {
|
||||
/// Creates a new `YieldExpression` parser.
|
||||
pub(in crate::parser) fn new<I, A>(allow_in: I, allow_await: A) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for YieldExpression
where
    R: Read,
{
    type Output = Expression;

    /// Parses a `YieldExpression`, assuming the next token is the `yield` keyword.
    ///
    /// Produces a bare `yield`, a delegating `yield*`, or `yield <expr>` depending
    /// on what follows the keyword.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("YieldExpression", "Parsing");

        // `yield` must be present and unescaped (the `false` escape flag).
        cursor.expect(
            TokenKind::Keyword((Keyword::Yield, false)),
            "yield expression",
            interner,
        )?;

        // A line terminator (or end of input) right after `yield` terminates the
        // expression: `yield` with no operand and no delegation.
        if matches!(
            cursor.peek_is_line_terminator(0, interner)?,
            Some(true) | None
        ) {
            return Ok(Yield::new(None, false).into());
        }

        let token = cursor.peek(0, interner).or_abrupt()?;
        match token.kind() {
            // `yield* <expr>`: delegating yield; the operand is mandatory here.
            TokenKind::Punctuator(Punctuator::Mul) => {
                cursor.advance(interner);
                let expr = AssignmentExpression::new(None, self.allow_in, true, self.allow_await)
                    .parse(cursor, interner)?;
                Ok(Yield::new(Some(expr), true).into())
            }
            // Any token that can start an `AssignmentExpression` means the yield
            // has an operand: `yield <expr>`. The `[Yield]` flag is set (`true`)
            // for the operand since we are inside a generator.
            TokenKind::Identifier(_)
            | TokenKind::Punctuator(
                Punctuator::OpenParen
                | Punctuator::Add
                | Punctuator::Sub
                | Punctuator::Not
                | Punctuator::Neg
                | Punctuator::Inc
                | Punctuator::Dec
                | Punctuator::OpenBracket
                | Punctuator::OpenBlock
                | Punctuator::Div,
            )
            | TokenKind::Keyword((
                Keyword::Yield
                | Keyword::Await
                | Keyword::Delete
                | Keyword::Void
                | Keyword::TypeOf
                | Keyword::New
                | Keyword::This
                | Keyword::Function
                | Keyword::Class
                | Keyword::Async,
                _,
            ))
            | TokenKind::BooleanLiteral(_)
            | TokenKind::NullLiteral
            | TokenKind::StringLiteral(_)
            | TokenKind::TemplateNoSubstitution(_)
            | TokenKind::NumericLiteral(_)
            | TokenKind::RegularExpressionLiteral(_, _)
            | TokenKind::TemplateMiddle(_) => {
                let expr = AssignmentExpression::new(None, self.allow_in, true, self.allow_await)
                    .parse(cursor, interner)?;
                Ok(Yield::new(Some(expr), false).into())
            }
            // Anything else (e.g. `)`, `;`, `,`) ends the expression: bare `yield`.
            _ => Ok(Yield::new(None, false).into()),
        }
    }
}
|
||||
59
javascript-engine/external/boa/boa_parser/src/parser/expression/await_expr.rs
vendored
Normal file
59
javascript-engine/external/boa/boa_parser/src/parser/expression/await_expr.rs
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
//! Await expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-AwaitExpression
|
||||
|
||||
use super::unary::UnaryExpression;
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{AllowYield, Cursor, ParseResult, TokenParser},
|
||||
};
|
||||
use boa_ast::{expression::Await, Keyword};
|
||||
use boa_interner::Interner;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses an await expression.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-AwaitExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct AwaitExpression {
|
||||
allow_yield: AllowYield,
|
||||
}
|
||||
|
||||
impl AwaitExpression {
|
||||
/// Creates a new `AwaitExpression` parser.
|
||||
pub(in crate::parser) fn new<Y>(allow_yield: Y) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for AwaitExpression
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Await;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
cursor.expect(
|
||||
TokenKind::Keyword((Keyword::Await, false)),
|
||||
"Await expression parsing",
|
||||
interner,
|
||||
)?;
|
||||
let expr = UnaryExpression::new(None, self.allow_yield, true).parse(cursor, interner)?;
|
||||
Ok(expr.into())
|
||||
}
|
||||
}
|
||||
232
javascript-engine/external/boa/boa_parser/src/parser/expression/identifiers.rs
vendored
Normal file
232
javascript-engine/external/boa/boa_parser/src/parser/expression/identifiers.rs
vendored
Normal file
@@ -0,0 +1,232 @@
|
||||
//! Identifiers parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-identifiers
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{cursor::Cursor, AllowAwait, AllowYield, OrAbrupt, ParseResult, TokenParser},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{expression::Identifier, Keyword};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Identifiers that may not be used as identifier references or binding names
/// when the parser is in strict mode. Checked via `contains` by the
/// `IdentifierReference` and `BindingIdentifier` parsers below.
pub(crate) const RESERVED_IDENTIFIERS_STRICT: [Sym; 9] = [
    Sym::IMPLEMENTS,
    Sym::INTERFACE,
    Sym::LET,
    Sym::PACKAGE,
    Sym::PRIVATE,
    Sym::PROTECTED,
    Sym::PUBLIC,
    Sym::STATIC,
    Sym::YIELD,
];
|
||||
|
||||
/// Identifier reference parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-IdentifierReference
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct IdentifierReference {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl IdentifierReference {
|
||||
/// Creates a new `IdentifierReference` parser.
|
||||
pub(in crate::parser) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for IdentifierReference
where
    R: Read,
{
    type Output = Identifier;

    /// Parses an `IdentifierReference`, consuming one token and enforcing the
    /// strict-mode and `[Yield]`/`[Await]` early errors of the production.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("IdentifierReference", "Parsing");

        let token = cursor.next(interner).or_abrupt()?;

        match token.kind() {
            // Strict mode: future reserved words cannot be identifier references.
            TokenKind::Identifier(ident)
                if cursor.strict_mode() && RESERVED_IDENTIFIERS_STRICT.contains(ident) =>
            {
                Err(Error::general(
                    "using future reserved keyword not allowed in strict mode IdentifierReference",
                    token.span().start(),
                ))
            }
            TokenKind::Identifier(ident) => Ok(Identifier::new(*ident)),
            // `let` is reserved in strict mode, but a valid identifier otherwise.
            TokenKind::Keyword((Keyword::Let, _)) if cursor.strict_mode() => Err(Error::general(
                "using future reserved keyword not allowed in strict mode IdentifierReference",
                token.span().start(),
            )),
            TokenKind::Keyword((Keyword::Let, _)) => Ok(Identifier::new(Sym::LET)),
            TokenKind::Keyword((Keyword::Yield, _)) if self.allow_yield.0 => {
                // Early Error: It is a Syntax Error if this production has a [Yield] parameter and StringValue of Identifier is "yield".
                Err(Error::general(
                    "Unexpected identifier",
                    token.span().start(),
                ))
            }
            // Outside `[Yield]` contexts, `yield` is a valid identifier in sloppy mode only.
            TokenKind::Keyword((Keyword::Yield, _)) if !self.allow_yield.0 => {
                if cursor.strict_mode() {
                    return Err(Error::general(
                        "Unexpected strict mode reserved word",
                        token.span().start(),
                    ));
                }
                Ok(Identifier::new(Sym::YIELD))
            }
            TokenKind::Keyword((Keyword::Await, _)) if self.allow_await.0 => {
                // Early Error: It is a Syntax Error if this production has an [Await] parameter and StringValue of Identifier is "await".
                Err(Error::general(
                    "Unexpected identifier",
                    token.span().start(),
                ))
            }
            // Outside `[Await]` contexts, `await` is usable as an identifier.
            TokenKind::Keyword((Keyword::Await, _)) if !self.allow_await.0 => {
                Ok(Identifier::new(Sym::AWAIT))
            }
            // `async` and `of` are contextual keywords; always valid identifiers here.
            TokenKind::Keyword((Keyword::Async, _)) => Ok(Identifier::new(Sym::ASYNC)),
            TokenKind::Keyword((Keyword::Of, _)) => Ok(Identifier::new(Sym::OF)),
            _ => Err(Error::unexpected(
                token.to_string(interner),
                token.span(),
                "IdentifierReference",
            )),
        }
    }
}
|
||||
|
||||
/// Binding identifier parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-BindingIdentifier
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct BindingIdentifier {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl BindingIdentifier {
|
||||
/// Creates a new `BindingIdentifier` parser.
|
||||
pub(in crate::parser) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for BindingIdentifier
where
    R: Read,
{
    type Output = Identifier;

    /// Strict mode parsing as per <https://tc39.es/ecma262/#sec-identifiers-static-semantics-early-errors>.
    ///
    /// Consumes one token and returns it as a binding identifier, enforcing the
    /// strict-mode restrictions (`arguments`, `eval`, reserved words) and the
    /// `[Yield]`/`[Await]` early errors of the production.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("BindingIdentifier", "Parsing");

        let next_token = cursor.next(interner).or_abrupt()?;

        match next_token.kind() {
            // Strict mode: `arguments` and `eval` cannot be binding targets.
            TokenKind::Identifier(Sym::ARGUMENTS) if cursor.strict_mode() => {
                Err(Error::lex(LexError::Syntax(
                    "unexpected identifier 'arguments' in strict mode".into(),
                    next_token.span().start(),
                )))
            }
            TokenKind::Identifier(Sym::EVAL) if cursor.strict_mode() => {
                Err(Error::lex(LexError::Syntax(
                    "unexpected identifier 'eval' in strict mode".into(),
                    next_token.span().start(),
                )))
            }
            TokenKind::Identifier(ident) => {
                // Strict mode: future reserved words cannot be bound either.
                if cursor.strict_mode() && RESERVED_IDENTIFIERS_STRICT.contains(ident) {
                    return Err(Error::general(
                        "using future reserved keyword not allowed in strict mode",
                        next_token.span().start(),
                    ));
                }
                Ok((*ident).into())
            }
            // `let` is reserved as a binding name in strict mode only.
            TokenKind::Keyword((Keyword::Let, _)) if cursor.strict_mode() => {
                Err(Error::lex(LexError::Syntax(
                    "unexpected identifier 'let' in strict mode".into(),
                    next_token.span().start(),
                )))
            }
            TokenKind::Keyword((Keyword::Let, _)) => Ok(Sym::LET.into()),
            TokenKind::Keyword((Keyword::Yield, _)) if self.allow_yield.0 => {
                // Early Error: It is a Syntax Error if this production has a [Yield] parameter and StringValue of Identifier is "yield".
                Err(Error::general(
                    "Unexpected identifier",
                    next_token.span().start(),
                ))
            }
            TokenKind::Keyword((Keyword::Yield, _)) if !self.allow_yield.0 => {
                if cursor.strict_mode() {
                    Err(Error::general(
                        "yield keyword in binding identifier not allowed in strict mode",
                        next_token.span().start(),
                    ))
                } else {
                    Ok(Sym::YIELD.into())
                }
            }
            // Inside arrow-function parameter parsing (`cursor.arrow()`), `await`
            // is accepted as a binding name regardless of the [Await] flag.
            TokenKind::Keyword((Keyword::Await, _)) if cursor.arrow() => Ok(Sym::AWAIT.into()),
            TokenKind::Keyword((Keyword::Await, _)) if self.allow_await.0 => {
                // Early Error: It is a Syntax Error if this production has an [Await] parameter and StringValue of Identifier is "await".
                Err(Error::general(
                    "Unexpected identifier",
                    next_token.span().start(),
                ))
            }
            TokenKind::Keyword((Keyword::Await, _)) if !self.allow_await.0 => Ok(Sym::AWAIT.into()),
            // Contextual keywords are always valid binding names.
            TokenKind::Keyword((Keyword::Async, _)) => Ok(Sym::ASYNC.into()),
            TokenKind::Keyword((Keyword::Of, _)) => Ok(Sym::OF.into()),
            _ => Err(Error::expected(
                ["identifier".to_owned()],
                next_token.to_string(interner),
                next_token.span(),
                "binding identifier",
            )),
        }
    }
}
|
||||
|
||||
/// Label identifier parsing.
///
/// The `LabelIdentifier` production has the same grammar and early errors as
/// `BindingIdentifier`, so the same parser is reused via a type alias.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-LabelIdentifier
pub(in crate::parser) type LabelIdentifier = BindingIdentifier;
|
||||
116
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/arguments.rs
vendored
Normal file
116
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/arguments.rs
vendored
Normal file
@@ -0,0 +1,116 @@
|
||||
//! Argument parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Glossary/Argument
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-Arguments
|
||||
|
||||
use crate::{
|
||||
lexer::{InputElement, TokenKind},
|
||||
parser::{
|
||||
expression::AssignmentExpression, AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult,
|
||||
TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{expression::Spread, Expression, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses a list of arguments.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Glossary/Argument
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-Arguments
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser::expression) struct Arguments {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl Arguments {
|
||||
/// Creates a new `Arguments` parser.
|
||||
pub(in crate::parser::expression) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for Arguments
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Box<[Expression]>;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("Arguments", "Parsing");
|
||||
|
||||
cursor.expect(Punctuator::OpenParen, "arguments", interner)?;
|
||||
let mut args = Vec::new();
|
||||
loop {
|
||||
cursor.set_goal(InputElement::RegExp);
|
||||
let next_token = cursor.peek(0, interner).or_abrupt()?;
|
||||
|
||||
match next_token.kind() {
|
||||
TokenKind::Punctuator(Punctuator::CloseParen) => {
|
||||
cursor.advance(interner);
|
||||
break;
|
||||
}
|
||||
TokenKind::Punctuator(Punctuator::Comma) => {
|
||||
let next_token = cursor.next(interner)?.expect(", token vanished"); // Consume the token.
|
||||
|
||||
if args.is_empty() {
|
||||
return Err(Error::unexpected(
|
||||
next_token.to_string(interner),
|
||||
next_token.span(),
|
||||
None,
|
||||
));
|
||||
}
|
||||
|
||||
if cursor.next_if(Punctuator::CloseParen, interner)?.is_some() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if !args.is_empty() {
|
||||
return Err(Error::expected(
|
||||
[",".to_owned(), "}".to_owned()],
|
||||
next_token.to_string(interner),
|
||||
next_token.span(),
|
||||
"argument list",
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cursor.next_if(Punctuator::Spread, interner)?.is_some() {
|
||||
args.push(
|
||||
Spread::new(
|
||||
AssignmentExpression::new(None, true, self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?,
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
} else {
|
||||
args.push(
|
||||
AssignmentExpression::new(None, true, self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?,
|
||||
);
|
||||
}
|
||||
}
|
||||
cursor.set_goal(InputElement::Div);
|
||||
Ok(args.into_boxed_slice())
|
||||
}
|
||||
}
|
||||
156
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/call.rs
vendored
Normal file
156
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/call.rs
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
//! Call expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Functions
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-CallExpression
|
||||
|
||||
use super::arguments::Arguments;
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::{left_hand_side::template::TaggedTemplateLiteral, Expression},
|
||||
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use ast::function::PrivateName;
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
expression::{
|
||||
access::{PrivatePropertyAccess, SimplePropertyAccess},
|
||||
Call,
|
||||
},
|
||||
Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses a call expression.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-CallExpression
|
||||
#[derive(Debug)]
|
||||
pub(super) struct CallExpression {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
first_member_expr: ast::Expression,
|
||||
}
|
||||
|
||||
impl CallExpression {
|
||||
/// Creates a new `CallExpression` parser.
|
||||
pub(super) fn new<Y, A>(
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
first_member_expr: ast::Expression,
|
||||
) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
first_member_expr,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for CallExpression
where
    R: Read,
{
    type Output = ast::Expression;

    /// Parses the tail of a call expression: the mandatory first argument list,
    /// followed by any chain of further calls, property accesses, indexed
    /// accesses, and tagged template literals.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("CallExpression", "Parsing");

        let token = cursor.peek(0, interner).or_abrupt()?;

        // The first token must open an argument list; otherwise this is not a
        // call expression and we report an "expected `(`" error.
        let mut lhs = if token.kind() == &TokenKind::Punctuator(Punctuator::OpenParen) {
            let args =
                Arguments::new(self.allow_yield, self.allow_await).parse(cursor, interner)?;
            Call::new(self.first_member_expr, args).into()
        } else {
            let next_token = cursor.next(interner)?.expect("token vanished");
            return Err(Error::expected(
                ["(".to_owned()],
                next_token.to_string(interner),
                next_token.span(),
                "call expression",
            ));
        };

        // Repeatedly extend the expression with call/member suffixes.
        while let Some(tok) = cursor.peek(0, interner)? {
            let token = tok.clone();
            match token.kind() {
                // `lhs(...)` — another call.
                TokenKind::Punctuator(Punctuator::OpenParen) => {
                    let args = Arguments::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    lhs = ast::Expression::from(Call::new(lhs, args));
                }
                // `lhs.name` — dot property access. Keywords, `true`/`false`,
                // `null`, and private identifiers are all valid property names.
                TokenKind::Punctuator(Punctuator::Dot) => {
                    cursor.advance(interner);

                    let access = match cursor.next(interner).or_abrupt()?.kind() {
                        TokenKind::Identifier(name) => SimplePropertyAccess::new(lhs, *name).into(),
                        TokenKind::Keyword((kw, _)) => {
                            SimplePropertyAccess::new(lhs, kw.to_sym(interner)).into()
                        }
                        TokenKind::BooleanLiteral(true) => {
                            SimplePropertyAccess::new(lhs, Sym::TRUE).into()
                        }
                        TokenKind::BooleanLiteral(false) => {
                            SimplePropertyAccess::new(lhs, Sym::FALSE).into()
                        }
                        TokenKind::NullLiteral => SimplePropertyAccess::new(lhs, Sym::NULL).into(),
                        TokenKind::PrivateIdentifier(name) => {
                            // `obj.#x` is only legal inside a class body.
                            if !cursor.in_class() {
                                return Err(Error::general(
                                    "Private identifier outside of class",
                                    token.span().start(),
                                ));
                            }
                            PrivatePropertyAccess::new(lhs, PrivateName::new(*name)).into()
                        }
                        _ => {
                            return Err(Error::expected(
                                ["identifier".to_owned()],
                                token.to_string(interner),
                                token.span(),
                                "call expression",
                            ));
                        }
                    };

                    lhs = ast::Expression::PropertyAccess(access);
                }
                // `lhs[expr]` — computed property access.
                TokenKind::Punctuator(Punctuator::OpenBracket) => {
                    cursor.advance(interner);
                    let idx = Expression::new(None, true, self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    cursor.expect(Punctuator::CloseBracket, "call expression", interner)?;
                    lhs =
                        ast::Expression::PropertyAccess(SimplePropertyAccess::new(lhs, idx).into());
                }
                // `` lhs`...` `` — tagged template literal with `lhs` as the tag.
                TokenKind::TemplateNoSubstitution { .. } | TokenKind::TemplateMiddle { .. } => {
                    lhs = TaggedTemplateLiteral::new(
                        self.allow_yield,
                        self.allow_await,
                        tok.span().start(),
                        lhs,
                    )
                    .parse(cursor, interner)?
                    .into();
                }
                // Any other token ends the call chain.
                _ => break,
            }
        }
        Ok(lhs)
    }
}
|
||||
239
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/member.rs
vendored
Normal file
239
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/member.rs
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
//! Member expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-MemberExpression
|
||||
|
||||
use super::arguments::Arguments;
|
||||
use crate::{
|
||||
lexer::{InputElement, TokenKind},
|
||||
parser::{
|
||||
expression::{
|
||||
left_hand_side::template::TaggedTemplateLiteral, primary::PrimaryExpression, Expression,
|
||||
},
|
||||
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use ast::function::PrivateName;
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
expression::{
|
||||
access::{
|
||||
PrivatePropertyAccess, PropertyAccessField, SimplePropertyAccess, SuperPropertyAccess,
|
||||
},
|
||||
Call, Identifier, New,
|
||||
},
|
||||
Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses a member expression.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-MemberExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct MemberExpression {
|
||||
name: Option<Identifier>,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl MemberExpression {
|
||||
/// Creates a new `MemberExpression` parser.
|
||||
pub(super) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for MemberExpression
where
    R: Read,
{
    type Output = ast::Expression;

    /// Parses a `MemberExpression` production: a primary expression, `new` expression,
    /// `new.target`, or `super` property access, followed by any number of `.`, `[...]`,
    /// or tagged-template suffixes.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("MemberExpression", "Parsing");

        // A `/` at the start of a member expression must lex as a regex literal.
        cursor.set_goal(InputElement::RegExp);

        let token = cursor.peek(0, interner).or_abrupt()?;
        let mut lhs = match token.kind() {
            // `new`/`super` written with escape sequences (e.g. `n\u0065w`) are illegal.
            TokenKind::Keyword((Keyword::New | Keyword::Super, true)) => {
                return Err(Error::general(
                    "keyword must not contain escaped characters",
                    token.span().start(),
                ));
            }
            TokenKind::Keyword((Keyword::New, false)) => {
                cursor.advance(interner);

                // `new.target` meta-property.
                if cursor.next_if(Punctuator::Dot, interner)?.is_some() {
                    let token = cursor.next(interner).or_abrupt()?;
                    match token.kind() {
                        TokenKind::Identifier(Sym::TARGET) => {
                            return Ok(ast::Expression::NewTarget)
                        }
                        _ => {
                            // NOTE(review): this message mentions private identifiers but the
                            // arm matches any non-`target` token after `new.` — looks like a
                            // copy-pasted message; confirm against upstream intent.
                            return Err(Error::general(
                                "unexpected private identifier",
                                token.span().start(),
                            ));
                        }
                    }
                }

                // Recurse to parse the constructor target of `new`.
                let lhs = self.parse(cursor, interner)?;
                // Arguments are optional: `new Foo` is equivalent to `new Foo()`.
                let args = match cursor.peek(0, interner)? {
                    Some(next) if next.kind() == &TokenKind::Punctuator(Punctuator::OpenParen) => {
                        Arguments::new(self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?
                    }
                    _ => Box::new([]),
                };
                let call_node = Call::new(lhs, args);

                ast::Expression::from(New::from(call_node))
            }
            TokenKind::Keyword((Keyword::Super, _)) => {
                cursor.advance(interner);
                let token = cursor.next(interner).or_abrupt()?;
                match token.kind() {
                    // `super.prop` — identifier-like names, keywords, and literal
                    // keywords (`true`/`false`/`null`) are all valid property names.
                    TokenKind::Punctuator(Punctuator::Dot) => {
                        let token = cursor.next(interner).or_abrupt()?;
                        let field = match token.kind() {
                            TokenKind::Identifier(name) => {
                                SuperPropertyAccess::new(PropertyAccessField::from(*name))
                            }
                            TokenKind::Keyword((kw, _)) => {
                                SuperPropertyAccess::new(kw.to_sym(interner).into())
                            }
                            TokenKind::BooleanLiteral(true) => {
                                SuperPropertyAccess::new(Sym::TRUE.into())
                            }
                            TokenKind::BooleanLiteral(false) => {
                                SuperPropertyAccess::new(Sym::FALSE.into())
                            }
                            TokenKind::NullLiteral => SuperPropertyAccess::new(Sym::NULL.into()),
                            // `super.#x` is never valid.
                            TokenKind::PrivateIdentifier(_) => {
                                return Err(Error::general(
                                    "unexpected private identifier",
                                    token.span().start(),
                                ));
                            }
                            _ => {
                                return Err(Error::unexpected(
                                    token.to_string(interner),
                                    token.span(),
                                    "expected super property",
                                ));
                            }
                        };
                        ast::Expression::PropertyAccess(field.into())
                    }
                    // `super[expr]` computed property access.
                    TokenKind::Punctuator(Punctuator::OpenBracket) => {
                        let expr = Expression::new(None, true, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?;
                        cursor.expect(Punctuator::CloseBracket, "super property", interner)?;
                        ast::Expression::PropertyAccess(
                            SuperPropertyAccess::new(expr.into()).into(),
                        )
                    }
                    _ => {
                        return Err(Error::unexpected(
                            token.to_string(interner),
                            token.span(),
                            "expected super property",
                        ))
                    }
                }
            }
            // Anything else starts as a primary expression.
            _ => PrimaryExpression::new(self.name, self.allow_yield, self.allow_await)
                .parse(cursor, interner)?,
        };

        // After the head is parsed, a `` ` `` continues a template tail, not a new regex.
        cursor.set_goal(InputElement::TemplateTail);

        // Fold property accesses and tagged templates onto `lhs` left-to-right.
        while let Some(tok) = cursor.peek(0, interner)? {
            match tok.kind() {
                TokenKind::Punctuator(Punctuator::Dot) => {
                    cursor
                        .next(interner)?
                        .expect("dot punctuator token disappeared"); // We move the parser forward.

                    let token = cursor.next(interner).or_abrupt()?;

                    let access = match token.kind() {
                        TokenKind::Identifier(name) => SimplePropertyAccess::new(lhs, *name).into(),
                        // Reserved words are valid property names after `.`.
                        TokenKind::Keyword((kw, _)) => {
                            SimplePropertyAccess::new(lhs, kw.to_sym(interner)).into()
                        }
                        TokenKind::BooleanLiteral(true) => {
                            SimplePropertyAccess::new(lhs, Sym::TRUE).into()
                        }
                        TokenKind::BooleanLiteral(false) => {
                            SimplePropertyAccess::new(lhs, Sym::FALSE).into()
                        }
                        TokenKind::NullLiteral => SimplePropertyAccess::new(lhs, Sym::NULL).into(),
                        // `obj.#x` is only valid inside a class body.
                        TokenKind::PrivateIdentifier(name) => {
                            if !cursor.in_class() {
                                return Err(Error::general(
                                    "Private identifier outside of class",
                                    token.span().start(),
                                ));
                            }
                            PrivatePropertyAccess::new(lhs, PrivateName::new(*name)).into()
                        }
                        _ => {
                            return Err(Error::expected(
                                ["identifier".to_owned()],
                                token.to_string(interner),
                                token.span(),
                                "member expression",
                            ));
                        }
                    };

                    lhs = ast::Expression::PropertyAccess(access);
                }
                TokenKind::Punctuator(Punctuator::OpenBracket) => {
                    cursor
                        .next(interner)?
                        .expect("open bracket punctuator token disappeared"); // We move the parser forward.
                    let idx = Expression::new(None, true, self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    cursor.expect(Punctuator::CloseBracket, "member expression", interner)?;
                    lhs =
                        ast::Expression::PropertyAccess(SimplePropertyAccess::new(lhs, idx).into());
                }
                // A template literal directly after a member expression is a tagged template.
                TokenKind::TemplateNoSubstitution { .. } | TokenKind::TemplateMiddle { .. } => {
                    lhs = TaggedTemplateLiteral::new(
                        self.allow_yield,
                        self.allow_await,
                        tok.span().start(),
                        lhs,
                    )
                    .parse(cursor, interner)?
                    .into();
                }
                _ => break,
            }
        }

        Ok(lhs)
    }
}
|
||||
130
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/mod.rs
vendored
Normal file
130
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/mod.rs
vendored
Normal file
@@ -0,0 +1,130 @@
|
||||
//! Left hand side expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Expressions_and_Operators#Left-hand-side_expressions
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-left-hand-side-expressions
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
mod arguments;
|
||||
mod call;
|
||||
mod member;
|
||||
mod optional;
|
||||
mod template;
|
||||
|
||||
use crate::{
|
||||
lexer::{InputElement, TokenKind},
|
||||
parser::{
|
||||
expression::left_hand_side::{
|
||||
arguments::Arguments, call::CallExpression, member::MemberExpression,
|
||||
optional::OptionalExpression,
|
||||
},
|
||||
AllowAwait, AllowYield, Cursor, ParseResult, TokenParser,
|
||||
},
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::{Identifier, SuperCall},
|
||||
Expression, Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses a left hand side expression.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Expressions_and_Operators#Left-hand-side_expressions
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-LeftHandSideExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser) struct LeftHandSideExpression {
|
||||
name: Option<Identifier>,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl LeftHandSideExpression {
|
||||
/// Creates a new `LeftHandSideExpression` parser.
|
||||
pub(in crate::parser) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for LeftHandSideExpression
where
    R: Read,
{
    type Output = Expression;

    /// Parses a `LeftHandSideExpression`: either a `super(...)` call, or a member
    /// expression optionally followed by a call and/or an optional chain (`?.`).
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        /// Checks if we need to parse a super call expression `super()`.
        ///
        /// It first checks if the next token is `super`, and if it is, it checks if the second next
        /// token is the open parenthesis (`(`) punctuator.
        ///
        /// This is needed because the `if let` chain is very complex, and putting it inline in the
        /// initialization of `lhs` would make it very hard to return an expression over all
        /// possible branches of the `if let`s. Instead, we extract the check into its own function,
        /// then use it inside the condition of a simple `if ... else` expression.
        fn is_super_call<R: Read>(
            cursor: &mut Cursor<R>,
            interner: &mut Interner,
        ) -> ParseResult<bool> {
            if let Some(next) = cursor.peek(0, interner)? {
                if let TokenKind::Keyword((Keyword::Super, _)) = next.kind() {
                    if let Some(next) = cursor.peek(1, interner)? {
                        if next.kind() == &TokenKind::Punctuator(Punctuator::OpenParen) {
                            return Ok(true);
                        }
                    }
                }
            }
            Ok(false)
        }
        let _timer = Profiler::global().start_event("LeftHandSideExpression", "Parsing");

        // A following `` ` `` should continue a template, not start a regex.
        cursor.set_goal(InputElement::TemplateTail);

        let mut lhs = if is_super_call(cursor, interner)? {
            // Consume `super`; `Arguments` consumes the parenthesized argument list.
            cursor.advance(interner);
            let args =
                Arguments::new(self.allow_yield, self.allow_await).parse(cursor, interner)?;
            SuperCall::new(args).into()
        } else {
            let mut member = MemberExpression::new(self.name, self.allow_yield, self.allow_await)
                .parse(cursor, interner)?;
            // A `(` directly after a member expression turns it into a call expression,
            // which itself keeps consuming trailing `()`/`.`/`[]` suffixes.
            if let Some(tok) = cursor.peek(0, interner)? {
                if tok.kind() == &TokenKind::Punctuator(Punctuator::OpenParen) {
                    member = CallExpression::new(self.allow_yield, self.allow_await, member)
                        .parse(cursor, interner)?;
                }
            }
            member
        };

        // `?.` starts an optional chain rooted at everything parsed so far.
        if let Some(tok) = cursor.peek(0, interner)? {
            if tok.kind() == &TokenKind::Punctuator(Punctuator::Optional) {
                lhs = OptionalExpression::new(self.allow_yield, self.allow_await, lhs)
                    .parse(cursor, interner)?
                    .into();
            }
        }

        Ok(lhs)
    }
}
|
||||
172
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/optional/mod.rs
vendored
Normal file
172
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/optional/mod.rs
vendored
Normal file
@@ -0,0 +1,172 @@
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::{Token, TokenKind},
|
||||
parser::{
|
||||
cursor::Cursor, expression::left_hand_side::arguments::Arguments, expression::Expression,
|
||||
AllowAwait, AllowYield, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use ast::function::PrivateName;
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
expression::{access::PropertyAccessField, Optional, OptionalOperation, OptionalOperationKind},
|
||||
Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses an optional expression.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Optional_chaining
|
||||
/// [spec]: https://tc39.es/ecma262/multipage/ecmascript-language-expressions.html#prod-OptionalExpression
|
||||
#[derive(Debug, Clone)]
|
||||
pub(in crate::parser) struct OptionalExpression {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
target: ast::Expression,
|
||||
}
|
||||
|
||||
impl OptionalExpression {
|
||||
/// Creates a new `OptionalExpression` parser.
|
||||
pub(in crate::parser) fn new<Y, A>(
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
target: ast::Expression,
|
||||
) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
target,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for OptionalExpression
where
    R: Read,
{
    type Output = Optional;

    /// Parses the operations of an optional chain (`?.prop`, `?.[expr]`, `?.(args)`)
    /// and the non-optional continuations that may follow them.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        /// Parses a single non-computed property access after a dot: an identifier,
        /// keyword, literal keyword, or (inside a class) a private name.
        fn parse_const_access<R: Read>(
            cursor: &mut Cursor<R>,
            token: &Token,
            interner: &mut Interner,
        ) -> ParseResult<OptionalOperationKind> {
            let item = match token.kind() {
                TokenKind::Identifier(name) => OptionalOperationKind::SimplePropertyAccess {
                    field: PropertyAccessField::Const(*name),
                },
                // Reserved words are valid property names after `.`.
                TokenKind::Keyword((kw, _)) => OptionalOperationKind::SimplePropertyAccess {
                    field: PropertyAccessField::Const(kw.to_sym(interner)),
                },
                TokenKind::BooleanLiteral(true) => OptionalOperationKind::SimplePropertyAccess {
                    field: PropertyAccessField::Const(Sym::TRUE),
                },
                TokenKind::BooleanLiteral(false) => OptionalOperationKind::SimplePropertyAccess {
                    field: PropertyAccessField::Const(Sym::FALSE),
                },
                TokenKind::NullLiteral => OptionalOperationKind::SimplePropertyAccess {
                    field: PropertyAccessField::Const(Sym::NULL),
                },
                TokenKind::PrivateIdentifier(name) => {
                    // `#x` access is only valid inside a class body.
                    if !cursor.in_class() {
                        return Err(Error::general(
                            "Private identifier outside of class",
                            token.span().start(),
                        ));
                    }

                    OptionalOperationKind::PrivatePropertyAccess {
                        field: PrivateName::new(*name),
                    }
                }
                _ => {
                    return Err(Error::expected(
                        ["identifier".to_owned()],
                        token.to_string(interner),
                        token.span(),
                        "optional chain",
                    ))
                }
            };
            Ok(item)
        }
        let _timer = Profiler::global().start_event("OptionalExpression", "Parsing");

        let mut items = Vec::new();

        while let Some(token) = cursor.peek(0, interner)? {
            // `shorted` records whether this link used the `?.` operator, i.e. whether
            // evaluation short-circuits here when the target is null/undefined.
            let shorted = match token.kind() {
                TokenKind::Punctuator(Punctuator::Optional) => {
                    cursor.advance(interner);
                    true
                }
                // Plain `(`/`[` continue the chain without a new short-circuit point.
                TokenKind::Punctuator(Punctuator::OpenParen | Punctuator::OpenBracket) => false,
                TokenKind::Punctuator(Punctuator::Dot) => {
                    // A plain `.name` link can be handled entirely here; skip the
                    // shared suffix handling below.
                    cursor.advance(interner);
                    let field = cursor.next(interner).or_abrupt()?;

                    let item = parse_const_access(cursor, &field, interner)?;

                    items.push(OptionalOperation::new(item, false));
                    continue;
                }
                // Tagged templates are forbidden anywhere inside an optional chain.
                TokenKind::TemplateMiddle(_) | TokenKind::TemplateNoSubstitution(_) => {
                    return Err(Error::general(
                        "Invalid tagged template on optional chain",
                        token.span().start(),
                    ))
                }
                _ => break,
            };

            let token = cursor.peek(0, interner).or_abrupt()?;

            let item = match token.kind() {
                // `?.(args)` / `(args)` — optional call.
                TokenKind::Punctuator(Punctuator::OpenParen) => {
                    let args = Arguments::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    OptionalOperationKind::Call { args }
                }
                // `?.[expr]` / `[expr]` — computed property access.
                TokenKind::Punctuator(Punctuator::OpenBracket) => {
                    cursor
                        .next(interner)?
                        .expect("open bracket punctuator token disappeared"); // We move the parser forward.
                    let idx = Expression::new(None, true, self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    cursor.expect(Punctuator::CloseBracket, "optional chain", interner)?;
                    OptionalOperationKind::SimplePropertyAccess {
                        field: PropertyAccessField::Expr(Box::new(idx)),
                    }
                }
                TokenKind::TemplateMiddle(_) | TokenKind::TemplateNoSubstitution(_) => {
                    return Err(Error::general(
                        "Invalid tagged template on optional chain",
                        token.span().start(),
                    ))
                }
                // `?.name` — the token after `?.` is a plain property name.
                _ => {
                    let token = cursor.next(interner)?.expect("token disappeared");
                    parse_const_access(cursor, &token, interner)?
                }
            };

            items.push(OptionalOperation::new(item, shorted));
        }

        Ok(Optional::new(self.target, items.into()))
    }
}
|
||||
91
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/optional/tests.rs
vendored
Normal file
91
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/optional/tests.rs
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
use crate::parser::tests::{check_invalid, check_parser};
|
||||
use boa_ast::{
|
||||
expression::{
|
||||
access::PropertyAccessField, literal::Literal, Identifier, Optional, OptionalOperation,
|
||||
OptionalOperationKind,
|
||||
},
|
||||
Expression, Statement,
|
||||
};
|
||||
|
||||
// Checks that a single optional property access on a literal parses into one
// short-circuiting `SimplePropertyAccess` operation.
#[test]
fn simple() {
    let interner = &mut Interner::default();

    check_parser(
        r#"5?.name"#,
        vec![Statement::Expression(
            Optional::new(
                Literal::Int(5).into(),
                vec![OptionalOperation::new(
                    OptionalOperationKind::SimplePropertyAccess {
                        field: PropertyAccessField::Const(
                            interner.get_or_intern_static("name", utf16!("name")),
                        ),
                    },
                    true,
                )]
                .into(),
            )
            .into(),
        )
        .into()],
        interner,
    );
}

// Checks a chain mixing the three operation kinds: `?.b` (shorted property
// access), `(true)` (non-shorted call), and `?.["c"]` (shorted computed access).
#[test]
fn complex_chain() {
    let interner = &mut Interner::default();

    check_parser(
        r#"a?.b(true)?.["c"]"#,
        vec![Statement::Expression(
            Optional::new(
                Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                vec![
                    OptionalOperation::new(
                        OptionalOperationKind::SimplePropertyAccess {
                            field: PropertyAccessField::Const(
                                interner.get_or_intern_static("b", utf16!("b")),
                            ),
                        },
                        true,
                    ),
                    OptionalOperation::new(
                        OptionalOperationKind::Call {
                            args: vec![Expression::Literal(Literal::Bool(true))].into(),
                        },
                        false,
                    ),
                    OptionalOperation::new(
                        OptionalOperationKind::SimplePropertyAccess {
                            field: PropertyAccessField::Expr(Box::new(
                                Literal::String(interner.get_or_intern_static("c", utf16!("c")))
                                    .into(),
                            )),
                        },
                        true,
                    ),
                ]
                .into(),
            )
            .into(),
        )
        .into()],
        interner,
    );
}

// Tagged templates are syntax errors anywhere within an optional chain,
// including when the template starts on a following line.
#[test]
fn reject_templates() {
    check_invalid("console.log?.`Hello`");
    check_invalid("console?.log`Hello`");
    check_invalid(
        r#"
        const a = console?.log
        `Hello`"#,
    );
}
|
||||
99
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/template.rs
vendored
Normal file
99
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/template.rs
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
cursor::Cursor, expression::Expression, AllowAwait, AllowYield, OrAbrupt, ParseResult,
|
||||
TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{self as ast, expression::TaggedTemplate, Position, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses a tagged template.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-TemplateLiteral
|
||||
#[derive(Debug, Clone)]
|
||||
pub(super) struct TaggedTemplateLiteral {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
start: Position,
|
||||
tag: ast::Expression,
|
||||
}
|
||||
|
||||
impl TaggedTemplateLiteral {
|
||||
/// Creates a new `TaggedTemplateLiteral` parser.
|
||||
pub(super) fn new<Y, A>(
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
start: Position,
|
||||
tag: ast::Expression,
|
||||
) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
start,
|
||||
tag,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for TaggedTemplateLiteral
where
    R: Read,
{
    type Output = TaggedTemplate;

    /// Parses the template-literal part of a tagged template, collecting the raw
    /// strings, the cooked strings, and the substitution expressions.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("TaggedTemplateLiteral", "Parsing");

        // Parallel lists: one raw/cooked pair per template chunk, one expression
        // per `${...}` substitution.
        let mut raws = Vec::new();
        let mut cookeds = Vec::new();
        let mut exprs = Vec::new();

        let mut token = cursor.next(interner).or_abrupt()?;

        loop {
            match token.kind() {
                // A chunk followed by `${` — record the strings, parse the
                // substitution expression, then require the closing `}`.
                TokenKind::TemplateMiddle(template_string) => {
                    raws.push(template_string.as_raw());
                    // An invalid escape cooks to `None` rather than erroring:
                    // tagged templates may contain invalid escape sequences.
                    cookeds.push(template_string.to_owned_cooked(interner).ok());
                    exprs.push(
                        Expression::new(None, true, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?,
                    );
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::CloseBlock),
                        "template literal",
                        interner,
                    )?;
                }
                // The final chunk (no further substitutions) ends the template.
                TokenKind::TemplateNoSubstitution(template_string) => {
                    raws.push(template_string.as_raw());
                    cookeds.push(template_string.to_owned_cooked(interner).ok());
                    return Ok(TaggedTemplate::new(
                        self.tag,
                        raws.into_boxed_slice(),
                        cookeds.into_boxed_slice(),
                        exprs.into_boxed_slice(),
                    ));
                }
                _ => {
                    return Err(Error::general(
                        "cannot parse tagged template literal",
                        self.start,
                    ))
                }
            }
            // Re-enter template lexing mode to get the next chunk after `}`.
            token = cursor.lex_template(self.start, interner)?;
        }
    }
}
|
||||
65
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/tests.rs
vendored
Normal file
65
javascript-engine/external/boa/boa_parser/src/parser/expression/left_hand_side/tests.rs
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
expression::{access::SimplePropertyAccess, Call, Identifier},
|
||||
Expression, Statement,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
// Asserts that `a().<$property>` parses into a property access on a call node.
// Written as a macro (not a fn) because `$property` must be a literal for `utf16!`.
macro_rules! check_call_property_identifier {
    ($property:literal) => {{
        let interner = &mut Interner::default();
        check_parser(
            format!("a().{}", $property).as_str(),
            vec![Statement::Expression(Expression::PropertyAccess(
                SimplePropertyAccess::new(
                    Call::new(
                        Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                        Box::default(),
                    )
                    .into(),
                    interner.get_or_intern_static($property, utf16!($property)),
                )
                .into(),
            ))
            .into()],
            interner,
        );
    }};
}

// Reserved words and literal keywords are valid property names after a call.
#[test]
fn check_call_properties() {
    check_call_property_identifier!("prop");
    check_call_property_identifier!("true");
    check_call_property_identifier!("false");
    check_call_property_identifier!("null");
    check_call_property_identifier!("let");
}

// Asserts that `a.<$property>` parses into a simple property access on an identifier.
macro_rules! check_member_property_identifier {
    ($property:literal) => {{
        let interner = &mut Interner::default();
        check_parser(
            format!("a.{}", $property).as_str(),
            vec![Statement::Expression(Expression::PropertyAccess(
                SimplePropertyAccess::new(
                    Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                    interner.get_or_intern_static($property, utf16!($property)),
                )
                .into(),
            ))
            .into()],
            interner,
        );
    }};
}

// Reserved words and literal keywords are valid property names after `.`.
#[test]
fn check_member_properties() {
    check_member_property_identifier!("prop");
    check_member_property_identifier!("true");
    check_member_property_identifier!("false");
    check_member_property_identifier!("null");
    check_member_property_identifier!("let");
}
|
||||
751
javascript-engine/external/boa/boa_parser/src/parser/expression/mod.rs
vendored
Normal file
751
javascript-engine/external/boa/boa_parser/src/parser/expression/mod.rs
vendored
Normal file
@@ -0,0 +1,751 @@
|
||||
//! Expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-ecmascript-language-expressions
|
||||
|
||||
mod assignment;
|
||||
mod identifiers;
|
||||
mod left_hand_side;
|
||||
mod primary;
|
||||
mod unary;
|
||||
mod update;
|
||||
|
||||
pub(in crate::parser) mod await_expr;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::{InputElement, TokenKind},
|
||||
parser::{
|
||||
expression::assignment::ExponentiationExpression, AllowAwait, AllowIn, AllowYield, Cursor,
|
||||
OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
expression::{
|
||||
operator::{
|
||||
binary::{BinaryOp, LogicalOp},
|
||||
Binary,
|
||||
},
|
||||
Identifier,
|
||||
},
|
||||
Keyword, Position, Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
pub(super) use self::{assignment::AssignmentExpression, primary::Initializer};
|
||||
pub(in crate::parser) use {
|
||||
identifiers::{BindingIdentifier, LabelIdentifier},
|
||||
left_hand_side::LeftHandSideExpression,
|
||||
primary::object_initializer::{
|
||||
AsyncGeneratorMethod, AsyncMethod, GeneratorMethod, PropertyName,
|
||||
},
|
||||
};
|
||||
|
||||
/// Generates an expression parser for a number of expressions whose production rules are of the following pattern.
|
||||
///
|
||||
/// ```text
|
||||
/// <TargetExpression>[allowed_identifiers]
|
||||
/// => <InnerExpression>[?allowed_identifiers]
|
||||
/// => <TargetExpression>[?allowed_identifiers] <op1> <InnerExpression>[?allowed_identifiers]
|
||||
/// => <TargetExpression>[?allowed_identifiers] <op2> <InnerExpression>[?allowed_identifiers]
|
||||
/// ...
|
||||
/// ```
|
||||
///
|
||||
/// This macro has 2 mandatory identifiers:
|
||||
/// - The `$name` identifier is the name of the `TargetExpression` struct that the parser will be implemented for.
|
||||
/// - The `$lower` identifier is the name of the `InnerExpression` struct according to the pattern above.
|
||||
///
|
||||
/// A list of punctuators (operands between the `TargetExpression` and `InnerExpression`) are passed as the third parameter.
|
||||
///
|
||||
/// The fifth parameter is an `Option<InputElement>` which sets the goal symbol to set before parsing (or None to leave it as is).
|
||||
// Generates `impl TokenParser for $name` for left-associative binary expression
// levels: parse one `$lower`, then fold `<op> <$lower>` pairs into `Binary` nodes.
//
// - `$name`: the target expression parser struct.
// - `$lower`: the next-lower-precedence parser.
// - `[$op, ...]`: the punctuators accepted at this precedence level.
// - `[$low_param, ...]`: the `self` fields forwarded to `$lower::new`.
// - `$goal`: an `Option<InputElement>` lexer goal to set before parsing, or `None`.
macro_rules! expression {
    ($name:ident, $lower:ident, [$( $op:path ),*], [$( $low_param:ident ),*], $goal:expr ) => {
        impl<R> TokenParser<R> for $name
        where
            R: Read
        {
            type Output = ast::Expression;

            fn parse(mut self, cursor: &mut Cursor<R>, interner: &mut Interner)-> ParseResult<ast::Expression> {
                let _timer = Profiler::global().start_event(stringify!($name), "Parsing");

                // Set the lexer goal symbol if one was requested. `if let` evaluates
                // `$goal` once, instead of the `is_some()`/`unwrap()` pair.
                if let Some(goal) = $goal {
                    cursor.set_goal(goal);
                }

                let mut lhs = $lower::new($( self.$low_param ),*).parse(cursor, interner)?;
                // Only the first (leftmost) operand may receive the name hint.
                self.name = None;
                while let Some(tok) = cursor.peek(0, interner)? {
                    match *tok.kind() {
                        // Accept any of this level's operators and fold left-associatively.
                        TokenKind::Punctuator(op) if $( op == $op )||* => {
                            cursor.advance(interner);
                            lhs = Binary::new(
                                op.as_binary_op().expect("Could not get binary operation."),
                                lhs,
                                $lower::new($( self.$low_param ),*).parse(cursor, interner)?
                            ).into();
                        }
                        _ => break
                    }
                }

                Ok(lhs)
            }
        }
    };
}
|
||||
|
||||
/// Expression parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-Expression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct Expression {
|
||||
name: Option<Identifier>,
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl Expression {
|
||||
/// Creates a new `Expression` parser.
|
||||
pub(super) fn new<N, I, Y, A>(name: N, allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for Expression
where
    R: Read,
{
    type Output = ast::Expression;

    /// Parses a full `Expression`: one or more assignment expressions joined by the
    /// comma (sequence) operator, folded left-associatively into `Binary` nodes.
    fn parse(
        mut self,
        cursor: &mut Cursor<R>,
        interner: &mut Interner,
    ) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("Expression", "Parsing");

        let mut lhs =
            AssignmentExpression::new(self.name, self.allow_in, self.allow_yield, self.allow_await)
                .parse(cursor, interner)?;
        // Only the first operand may receive the name hint.
        self.name = None;
        while let Some(tok) = cursor.peek(0, interner)? {
            match *tok.kind() {
                TokenKind::Punctuator(Punctuator::Comma) => {
                    // A comma followed by `)` belongs to the caller (e.g. a trailing
                    // comma in an argument or parameter list) — stop without consuming.
                    if cursor.peek(1, interner).or_abrupt()?.kind()
                        == &TokenKind::Punctuator(Punctuator::CloseParen)
                    {
                        return Ok(lhs);
                    }

                    // Likewise, a comma followed by `...` starts a rest/spread element
                    // owned by the caller — stop without consuming.
                    if cursor.peek(1, interner).or_abrupt()?.kind()
                        == &TokenKind::Punctuator(Punctuator::Spread)
                    {
                        return Ok(lhs);
                    }

                    cursor.advance(interner);

                    // Fold the next operand into a left-associative comma chain.
                    lhs = Binary::new(
                        Punctuator::Comma
                            .as_binary_op()
                            .expect("Could not get binary operation."),
                        lhs,
                        AssignmentExpression::new(
                            self.name,
                            self.allow_in,
                            self.allow_yield,
                            self.allow_await,
                        )
                        .parse(cursor, interner)?,
                    )
                    .into();
                }
                _ => break,
            }
        }

        Ok(lhs)
    }
}
|
||||
|
||||
/// Parses a logical expression expression.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Logical_Operators
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-ShortCircuitExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct ShortCircuitExpression {
|
||||
name: Option<Identifier>,
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
previous: PreviousExpr,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
enum PreviousExpr {
|
||||
None,
|
||||
Logical,
|
||||
Coalesce,
|
||||
}
|
||||
|
||||
impl ShortCircuitExpression {
|
||||
/// Creates a new `ShortCircuitExpression` parser.
|
||||
pub(super) fn new<N, I, Y, A>(name: N, allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
previous: PreviousExpr::None,
|
||||
}
|
||||
}
|
||||
|
||||
fn with_previous<N, I, Y, A>(
|
||||
name: N,
|
||||
allow_in: I,
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
previous: PreviousExpr,
|
||||
) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
previous,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ShortCircuitExpression
where
    R: Read,
{
    type Output = ast::Expression;

    /// Parses a chain of `&&`, `||`, and `??` operators, enforcing the ECMAScript
    /// rule that `??` may not be mixed unparenthesized with `&&` or `||`.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ShortCircuitExpression", "Parsing");

        let mut current_node =
            BitwiseORExpression::new(self.name, self.allow_in, self.allow_yield, self.allow_await)
                .parse(cursor, interner)?;
        // Tracks which operator family has been seen so far in this chain.
        let mut previous = self.previous;

        while let Some(tok) = cursor.peek(0, interner)? {
            match tok.kind() {
                TokenKind::Punctuator(Punctuator::BoolAnd) => {
                    // `a ?? b && c` without parentheses is a syntax error.
                    if previous == PreviousExpr::Coalesce {
                        return Err(Error::expected(
                            ["??".to_owned()],
                            tok.to_string(interner),
                            tok.span(),
                            "logical expression (cannot use '??' without parentheses within '||' or '&&')",
                        ));
                    }
                    cursor.advance(interner);
                    previous = PreviousExpr::Logical;
                    // `&&` binds tighter than `||`, so the right operand is only a
                    // bitwise-OR expression, folded left-associatively.
                    let rhs = BitwiseORExpression::new(
                        self.name,
                        self.allow_in,
                        self.allow_yield,
                        self.allow_await,
                    )
                    .parse(cursor, interner)?;

                    current_node =
                        Binary::new(BinaryOp::Logical(LogicalOp::And), current_node, rhs).into();
                }
                TokenKind::Punctuator(Punctuator::BoolOr) => {
                    // `a ?? b || c` without parentheses is a syntax error.
                    if previous == PreviousExpr::Coalesce {
                        return Err(Error::expected(
                            ["??".to_owned()],
                            tok.to_string(interner),
                            tok.span(),
                            "logical expression (cannot use '??' without parentheses within '||' or '&&')",
                        ));
                    }
                    cursor.advance(interner);
                    previous = PreviousExpr::Logical;
                    // Recurse into a full short-circuit expression so a following
                    // `&&` chain is consumed (and folded) by the right-hand side.
                    let rhs = Self::with_previous(
                        self.name,
                        self.allow_in,
                        self.allow_yield,
                        self.allow_await,
                        PreviousExpr::Logical,
                    )
                    .parse(cursor, interner)?;
                    current_node =
                        Binary::new(BinaryOp::Logical(LogicalOp::Or), current_node, rhs).into();
                }
                TokenKind::Punctuator(Punctuator::Coalesce) => {
                    // `a && b ?? c` / `a || b ?? c` without parentheses is a syntax error.
                    if previous == PreviousExpr::Logical {
                        return Err(Error::expected(
                            ["&&".to_owned(), "||".to_owned()],
                            tok.to_string(interner),
                            tok.span(),
                            "cannot use '??' unparenthesized within '||' or '&&'",
                        ));
                    }
                    cursor.advance(interner);
                    previous = PreviousExpr::Coalesce;
                    let rhs = BitwiseORExpression::new(
                        self.name,
                        self.allow_in,
                        self.allow_yield,
                        self.allow_await,
                    )
                    .parse(cursor, interner)?;
                    current_node =
                        Binary::new(BinaryOp::Logical(LogicalOp::Coalesce), current_node, rhs)
                            .into();
                }
                _ => break,
            }
        }
        Ok(current_node)
    }
}
|
||||
|
||||
/// Parses a bitwise `OR` expression.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Bitwise_Operators#Bitwise_OR
/// [spec]: https://tc39.es/ecma262/#prod-BitwiseORExpression
#[derive(Debug, Clone, Copy)]
struct BitwiseORExpression {
    // Optional binding name, threaded through to the nested expression
    // parsers (presumably for anonymous-function name inference — confirm
    // against the primary-expression parsers).
    name: Option<Identifier>,
    // `[In]` grammar parameter: whether the `in` operator is allowed.
    allow_in: AllowIn,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl BitwiseORExpression {
|
||||
/// Creates a new `BitwiseORExpression` parser.
|
||||
pub(super) fn new<N, I, Y, A>(name: N, allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(review): `expression!` is defined elsewhere in this module; it
// appears to generate the `TokenParser` impl that parses a left-associative
// chain of `BitwiseXORExpression` operands separated by `|` tokens, with no
// lexer goal-symbol override — confirm against the macro definition.
expression!(
    BitwiseORExpression,
    BitwiseXORExpression,
    [Punctuator::Or],
    [name, allow_in, allow_yield, allow_await],
    None::<InputElement>
);
|
||||
|
||||
/// Parses a bitwise `XOR` expression.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Bitwise_Operators#Bitwise_XOR
/// [spec]: https://tc39.es/ecma262/#prod-BitwiseXORExpression
#[derive(Debug, Clone, Copy)]
struct BitwiseXORExpression {
    // Optional binding name threaded through to nested expression parsers.
    name: Option<Identifier>,
    // `[In]` grammar parameter.
    allow_in: AllowIn,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl BitwiseXORExpression {
|
||||
/// Creates a new `BitwiseXORExpression` parser.
|
||||
pub(super) fn new<N, I, Y, A>(name: N, allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(review): generates the `TokenParser` impl parsing a left-associative
// chain of `BitwiseANDExpression` operands separated by `^` — confirm
// against the `expression!` macro definition.
expression!(
    BitwiseXORExpression,
    BitwiseANDExpression,
    [Punctuator::Xor],
    [name, allow_in, allow_yield, allow_await],
    None::<InputElement>
);
|
||||
|
||||
/// Parses a bitwise `AND` expression.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Bitwise_Operators#Bitwise_AND
/// [spec]: https://tc39.es/ecma262/#prod-BitwiseANDExpression
#[derive(Debug, Clone, Copy)]
struct BitwiseANDExpression {
    // Optional binding name threaded through to nested expression parsers.
    name: Option<Identifier>,
    // `[In]` grammar parameter.
    allow_in: AllowIn,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl BitwiseANDExpression {
|
||||
/// Creates a new `BitwiseANDExpression` parser.
|
||||
pub(super) fn new<N, I, Y, A>(name: N, allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(review): generates the `TokenParser` impl parsing a left-associative
// chain of `EqualityExpression` operands separated by `&` — confirm against
// the `expression!` macro definition.
expression!(
    BitwiseANDExpression,
    EqualityExpression,
    [Punctuator::And],
    [name, allow_in, allow_yield, allow_await],
    None::<InputElement>
);
|
||||
|
||||
/// Parses an equality expression.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Comparison_Operators#Equality_operators
/// [spec]: https://tc39.es/ecma262/#sec-equality-operators
#[derive(Debug, Clone, Copy)]
struct EqualityExpression {
    // Optional binding name threaded through to nested expression parsers.
    name: Option<Identifier>,
    // `[In]` grammar parameter.
    allow_in: AllowIn,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl EqualityExpression {
|
||||
/// Creates a new `EqualityExpression` parser.
|
||||
pub(super) fn new<N, I, Y, A>(name: N, allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(review): generates the `TokenParser` impl parsing a left-associative
// chain of `RelationalExpression` operands separated by `==`, `!=`, `===`
// or `!==` — confirm against the `expression!` macro definition.
expression!(
    EqualityExpression,
    RelationalExpression,
    [
        Punctuator::Eq,
        Punctuator::NotEq,
        Punctuator::StrictEq,
        Punctuator::StrictNotEq
    ],
    [name, allow_in, allow_yield, allow_await],
    None::<InputElement>
);
|
||||
|
||||
/// Parses a relational expression.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Comparison_Operators#Relational_operators
/// [spec]: https://tc39.es/ecma262/#sec-relational-operators
#[derive(Debug, Clone, Copy)]
struct RelationalExpression {
    // Optional binding name threaded through to nested expression parsers.
    name: Option<Identifier>,
    // `[In]` grammar parameter: gates whether the `in` operator may appear.
    allow_in: AllowIn,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl RelationalExpression {
|
||||
/// Creates a new `RelationalExpression` parser.
|
||||
pub(super) fn new<N, I, Y, A>(name: N, allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for RelationalExpression
where
    R: Read,
{
    type Output = ast::Expression;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("Relation Expression", "Parsing");

        // Parse the first (left) operand.
        let mut lhs = ShiftExpression::new(self.name, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;
        // Fold further operands left-associatively while relational
        // operators keep appearing.
        while let Some(tok) = cursor.peek(0, interner)? {
            match *tok.kind() {
                // `<`, `>`, `<=`, `>=`
                TokenKind::Punctuator(op)
                    if op == Punctuator::LessThan
                        || op == Punctuator::GreaterThan
                        || op == Punctuator::LessThanOrEq
                        || op == Punctuator::GreaterThanOrEq =>
                {
                    cursor.advance(interner);
                    lhs = Binary::new(
                        // `expect` is an invariant: all four punctuators
                        // above map to binary operators.
                        op.as_binary_op().expect("Could not get binary operation."),
                        lhs,
                        ShiftExpression::new(self.name, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?,
                    )
                    .into();
                }
                // Keywords written with escape sequences (the `true` flag)
                // are rejected: `instanceof`/`in` must be spelled literally.
                TokenKind::Keyword((Keyword::InstanceOf | Keyword::In, true)) => {
                    return Err(Error::general(
                        "Keyword must not contain escaped characters",
                        tok.span().start(),
                    ));
                }
                // `instanceof` is always an operator here; `in` only when the
                // `[In]` grammar parameter allows it.
                TokenKind::Keyword((op, false))
                    if op == Keyword::InstanceOf
                        || (op == Keyword::In && self.allow_in == AllowIn(true)) =>
                {
                    cursor.advance(interner);
                    lhs = Binary::new(
                        op.as_binary_op().expect("Could not get binary operation."),
                        lhs,
                        ShiftExpression::new(self.name, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?,
                    )
                    .into();
                }
                // Any other token ends the relational expression.
                _ => break,
            }
        }

        Ok(lhs)
    }
}
|
||||
|
||||
/// Parses a bitwise shift expression.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Bitwise_Operators#Bitwise_shift_operators
/// [spec]: https://tc39.es/ecma262/#sec-bitwise-shift-operators
#[derive(Debug, Clone, Copy)]
struct ShiftExpression {
    // Optional binding name threaded through to nested expression parsers.
    // Note: no `allow_in` here — the `in` operator is not relevant below the
    // relational level.
    name: Option<Identifier>,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl ShiftExpression {
|
||||
/// Creates a new `ShiftExpression` parser.
|
||||
pub(super) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(review): generates the `TokenParser` impl parsing a left-associative
// chain of `AdditiveExpression` operands separated by `<<`, `>>` or `>>>` —
// confirm against the `expression!` macro definition.
expression!(
    ShiftExpression,
    AdditiveExpression,
    [
        Punctuator::LeftSh,
        Punctuator::RightSh,
        Punctuator::URightSh
    ],
    [name, allow_yield, allow_await],
    None::<InputElement>
);
|
||||
|
||||
/// Parses an additive expression.
///
/// This can be either an addition or a subtraction.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Arithmetic_Operators
/// [spec]: https://tc39.es/ecma262/#sec-additive-operators
#[derive(Debug, Clone, Copy)]
struct AdditiveExpression {
    // Optional binding name threaded through to nested expression parsers.
    name: Option<Identifier>,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl AdditiveExpression {
|
||||
/// Creates a new `AdditiveExpression` parser.
|
||||
pub(super) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(review): generates the `TokenParser` impl parsing a left-associative
// chain of `MultiplicativeExpression` operands separated by `+` or `-` —
// confirm against the `expression!` macro definition.
expression!(
    AdditiveExpression,
    MultiplicativeExpression,
    [Punctuator::Add, Punctuator::Sub],
    [name, allow_yield, allow_await],
    None::<InputElement>
);
|
||||
|
||||
/// Parses a multiplicative expression.
///
/// This can be either a multiplication, division or a modulo (remainder) expression.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Arithmetic_Operators#Division
/// [spec]: https://tc39.es/ecma262/#sec-multiplicative-operators
#[derive(Debug, Clone, Copy)]
struct MultiplicativeExpression {
    // Optional binding name threaded through to nested expression parsers.
    name: Option<Identifier>,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl MultiplicativeExpression {
|
||||
/// Creates a new `MultiplicativeExpression` parser.
|
||||
pub(super) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE(review): generates the `TokenParser` impl parsing a left-associative
// chain of `ExponentiationExpression` operands separated by `*`, `/` or `%`.
// Unlike the other invocations this one sets the lexer goal symbol to
// `InputElement::Div`, presumably so `/` lexes as division rather than the
// start of a regex literal — confirm against the macro definition.
expression!(
    MultiplicativeExpression,
    ExponentiationExpression,
    [Punctuator::Mul, Punctuator::Div, Punctuator::Mod],
    [name, allow_yield, allow_await],
    Some(InputElement::Div)
);
|
||||
|
||||
/// Returns an error if `arguments` or `eval` are used as identifier in strict mode.
///
/// `position` is the source location reported with the error.
const fn check_strict_arguments_or_eval(ident: Identifier, position: Position) -> ParseResult<()> {
    // Matching on the well-known `Sym` constants keeps this usable in a
    // `const fn` (no trait method calls needed).
    match ident.sym() {
        Sym::ARGUMENTS => Err(Error::general(
            "unexpected identifier 'arguments' in strict mode",
            position,
        )),
        Sym::EVAL => Err(Error::general(
            "unexpected identifier 'eval' in strict mode",
            position,
        )),
        _ => Ok(()),
    }
}
|
||||
136
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/array_initializer/mod.rs
vendored
Normal file
136
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/array_initializer/mod.rs
vendored
Normal file
@@ -0,0 +1,136 @@
|
||||
//! Array initializer parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-array-initializer
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::AssignmentExpression, AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult,
|
||||
TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::{literal, Spread},
|
||||
Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses an array literal.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array
/// [spec]: https://tc39.es/ecma262/#prod-ArrayLiteral
#[derive(Debug, Clone, Copy)]
pub(super) struct ArrayLiteral {
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl ArrayLiteral {
|
||||
/// Creates a new `ArrayLiteral` parser.
|
||||
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ArrayLiteral
where
    R: Read,
{
    type Output = literal::ArrayLiteral;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ArrayLiteral", "Parsing");
        // `None` entries represent elisions (holes), `Some` real elements.
        let mut elements = Vec::new();
        // Whether the literal ends with a comma directly after a spread
        // element — recorded in the resulting AST node.
        let mut has_trailing_comma_spread = false;
        // True right after an element was parsed, i.e. only a comma or `]`
        // may come next.
        let mut next_comma = false;
        // True while the most recently parsed element is a spread.
        let mut last_spread = false;

        loop {
            let token = cursor.peek(0, interner).or_abrupt()?;
            match token.kind() {
                TokenKind::Punctuator(Punctuator::CloseBracket) => {
                    cursor.advance(interner);
                    break;
                }
                // Separator comma after an element.
                TokenKind::Punctuator(Punctuator::Comma) if next_comma => {
                    cursor.advance(interner);

                    // A comma after a spread that is immediately followed by
                    // `]` is a trailing comma after a spread.
                    if last_spread {
                        let token = cursor.peek(0, interner).or_abrupt()?;
                        if token.kind() == &TokenKind::Punctuator(Punctuator::CloseBracket) {
                            has_trailing_comma_spread = true;
                        }
                    }

                    next_comma = false;
                }
                // Comma where an element was expected: an elision (hole).
                TokenKind::Punctuator(Punctuator::Comma) => {
                    cursor.advance(interner);
                    elements.push(None);
                }
                // `...` directly after an element (no comma) is a syntax error.
                TokenKind::Punctuator(Punctuator::Spread) if next_comma => {
                    return Err(Error::unexpected(
                        token.to_string(interner),
                        token.span(),
                        "expected comma or end of array",
                    ));
                }
                // Spread element: `...expr`.
                TokenKind::Punctuator(Punctuator::Spread) => {
                    cursor.advance(interner);
                    let node =
                        AssignmentExpression::new(None, true, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?;
                    elements.push(Some(Spread::new(node).into()));
                    next_comma = true;
                    last_spread = true;
                }
                // Any other token where a separator was required.
                _ if next_comma => {
                    return Err(Error::unexpected(
                        token.to_string(interner),
                        token.span(),
                        "expected comma or end of array",
                    ));
                }
                // Ordinary element.
                _ => {
                    let expr =
                        AssignmentExpression::new(None, true, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?;
                    elements.push(Some(expr));
                    next_comma = true;
                    last_spread = false;
                }
            }
        }

        // NOTE(review): `last_spread` stays set across a separator comma, so
        // this also flags an elision right after a spread (e.g. `[...a,,]`)
        // — confirm that is the intended semantics.
        if last_spread && elements.last() == Some(&None) {
            has_trailing_comma_spread = true;
        }

        Ok(literal::ArrayLiteral::new(
            elements,
            has_trailing_comma_spread,
        ))
    }
}
|
||||
135
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/array_initializer/tests.rs
vendored
Normal file
135
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/array_initializer/tests.rs
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
//! Tests for array initializer parsing.
|
||||
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
expression::literal::{ArrayLiteral, Literal},
|
||||
Expression, Statement,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks an empty array.
///
/// `[]` must parse to an expression statement holding an `ArrayLiteral`
/// with no elements.
#[test]
fn check_empty() {
    check_parser(
        "[]",
        vec![Statement::Expression(Expression::from(ArrayLiteral::from(vec![]))).into()],
        &mut Interner::default(),
    );
}
|
||||
|
||||
/// Checks an array with empty slot.
///
/// `[,]` is a single elision, represented as one `None` element.
#[test]
fn check_empty_slot() {
    check_parser(
        "[,]",
        vec![Statement::Expression(Expression::from(ArrayLiteral::from(vec![None]))).into()],
        &mut Interner::default(),
    );
}
|
||||
|
||||
/// Checks a numeric array.
///
/// Three numeric literals, no elisions.
#[test]
fn check_numeric_array() {
    check_parser(
        "[1, 2, 3]",
        vec![
            Statement::Expression(Expression::from(ArrayLiteral::from(vec![
                Some(Literal::from(1).into()),
                Some(Literal::from(2).into()),
                Some(Literal::from(3).into()),
            ])))
            .into(),
        ],
        &mut Interner::default(),
    );
}
|
||||
|
||||
/// Checks a numeric array with trailing comma.
///
/// A trailing comma after the last element must NOT produce an extra
/// elision — the AST is identical to `[1, 2, 3]`.
#[test]
fn check_numeric_array_trailing() {
    check_parser(
        "[1, 2, 3,]",
        vec![
            Statement::Expression(Expression::from(ArrayLiteral::from(vec![
                Some(Literal::from(1).into()),
                Some(Literal::from(2).into()),
                Some(Literal::from(3).into()),
            ])))
            .into(),
        ],
        &mut Interner::default(),
    );
}
|
||||
|
||||
/// Checks a numeric array with an elision.
///
/// The hole between `2` and `3` becomes a `None` element.
#[test]
fn check_numeric_array_elision() {
    check_parser(
        "[1, 2, , 3]",
        vec![
            Statement::Expression(Expression::from(ArrayLiteral::from(vec![
                Some(Literal::from(1).into()),
                Some(Literal::from(2).into()),
                None,
                Some(Literal::from(3).into()),
            ])))
            .into(),
        ],
        &mut Interner::default(),
    );
}
|
||||
|
||||
/// Checks a numeric array with repeated elisions.
///
/// Two consecutive holes become two `None` elements.
#[test]
fn check_numeric_array_repeated_elision() {
    check_parser(
        "[1, 2, ,, 3]",
        vec![
            Statement::Expression(Expression::from(ArrayLiteral::from(vec![
                Some(Literal::from(1).into()),
                Some(Literal::from(2).into()),
                None,
                None,
                Some(Literal::from(3).into()),
            ])))
            .into(),
        ],
        &mut Interner::default(),
    );
}
|
||||
|
||||
/// Checks a combined array.
///
/// Mixes number and string literals; the string is interned first so the
/// expected AST can reference its `Sym`.
#[test]
fn check_combined() {
    let interner = &mut Interner::default();
    check_parser(
        "[1, \"a\", 2]",
        vec![
            Statement::Expression(Expression::from(ArrayLiteral::from(vec![
                Some(Literal::from(1).into()),
                Some(Literal::from(interner.get_or_intern_static("a", utf16!("a"))).into()),
                Some(Literal::from(2).into()),
            ])))
            .into(),
        ],
        interner,
    );
}
|
||||
|
||||
/// Checks a combined array with an empty string
///
/// The empty string maps to the well-known `Sym::EMPTY_STRING`, so no
/// interning is needed up front.
#[test]
fn check_combined_empty_str() {
    check_parser(
        "[1, \"\", 2]",
        vec![
            Statement::Expression(Expression::from(ArrayLiteral::from(vec![
                Some(Literal::from(1).into()),
                Some(Literal::from(Sym::EMPTY_STRING).into()),
                Some(Literal::from(2).into()),
            ])))
            .into(),
        ],
        &mut Interner::default(),
    );
}
|
||||
@@ -0,0 +1,158 @@
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::BindingIdentifier,
|
||||
function::{FormalParameters, FunctionBody},
|
||||
name_in_lexically_declared_names, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::Identifier,
|
||||
function::AsyncFunction,
|
||||
operations::{bound_names, contains, top_level_lexically_declared_names, ContainsSymbol},
|
||||
Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Async Function expression parsing.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/async_function
/// [spec]: https://tc39.es/ecma262/#prod-AsyncFunctionExpression
#[derive(Debug, Clone, Copy)]
pub(super) struct AsyncFunctionExpression {
    // Fallback binding name: used as the function's name when the
    // expression itself is anonymous (see `name.or(self.name)` in `parse`).
    name: Option<Identifier>,
}
|
||||
|
||||
impl AsyncFunctionExpression {
|
||||
/// Creates a new `AsyncFunctionExpression` parser.
|
||||
pub(super) fn new<N>(name: N) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
{
|
||||
Self { name: name.into() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for AsyncFunctionExpression
where
    R: Read,
{
    type Output = AsyncFunction;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("AsyncFunctionExpression", "Parsing");
        // `async` must be followed by `function` on the same line, and the
        // keyword must not contain escape sequences (the `false` flag).
        cursor.peek_expect_no_lineterminator(0, "async function expression", interner)?;
        cursor.expect(
            (Keyword::Function, false),
            "async function expression",
            interner,
        )?;

        // Optional binding identifier. Contextual keywords (`yield`,
        // `await`, `async`, `of`) may still serve as a name here;
        // `BindingIdentifier` applies the detailed rules.
        let token = cursor.peek(0, interner).or_abrupt()?;
        let (name, name_span) = match token.kind() {
            TokenKind::Identifier(_)
            | TokenKind::Keyword((
                Keyword::Yield | Keyword::Await | Keyword::Async | Keyword::Of,
                _,
            )) => {
                let span = token.span();
                let name = BindingIdentifier::new(false, true).parse(cursor, interner)?;

                (Some(name), span)
            }
            _ => (None, token.span()),
        };

        // Position reported by all parameter-related early errors below.
        let params_start_position = cursor
            .expect(Punctuator::OpenParen, "async function expression", interner)?
            .span()
            .end();

        let params = FormalParameters::new(false, true).parse(cursor, interner)?;

        cursor.expect(
            Punctuator::CloseParen,
            "async function expression",
            interner,
        )?;
        cursor.expect(Punctuator::OpenBlock, "async function expression", interner)?;

        let body = FunctionBody::new(false, true).parse(cursor, interner)?;

        cursor.expect(
            Punctuator::CloseBlock,
            "async function expression",
            interner,
        )?;

        // Early Error: If the source code matching FormalParameters is strict mode code,
        // the Early Error rules for UniqueFormalParameters : FormalParameters are applied.
        if (cursor.strict_mode() || body.strict()) && params.has_duplicates() {
            return Err(Error::lex(LexError::Syntax(
                "Duplicate parameter name not allowed in this context".into(),
                params_start_position,
            )));
        }

        // Early Error: It is a Syntax Error if FunctionBodyContainsUseStrict of AsyncFunctionBody is true
        // and IsSimpleParameterList of FormalParameters is false.
        if body.strict() && !params.is_simple() {
            return Err(Error::lex(LexError::Syntax(
                "Illegal 'use strict' directive in function with non-simple parameter list".into(),
                params_start_position,
            )));
        }

        // Early Error: If BindingIdentifier is present and the source code matching BindingIdentifier is strict mode code,
        // it is a Syntax Error if the StringValue of BindingIdentifier is "eval" or "arguments".
        if let Some(name) = name {
            if (cursor.strict_mode() || body.strict())
                && [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym())
            {
                return Err(Error::lex(LexError::Syntax(
                    "unexpected identifier 'eval' or 'arguments' in strict mode".into(),
                    name_span.start(),
                )));
            }
        }

        // Catch early error for BindingIdentifier, because strictness of the functions body is also
        // relevant for the function parameters.
        if body.strict() && contains(&params, ContainsSymbol::EvalOrArguments) {
            return Err(Error::lex(LexError::Syntax(
                "unexpected identifier 'eval' or 'arguments' in strict mode".into(),
                params_start_position,
            )));
        }

        // It is a Syntax Error if any element of the BoundNames of FormalParameters
        // also occurs in the LexicallyDeclaredNames of FunctionBody.
        // https://tc39.es/ecma262/#sec-function-definitions-static-semantics-early-errors
        name_in_lexically_declared_names(
            &bound_names(&params),
            &top_level_lexically_declared_names(&body),
            params_start_position,
        )?;

        // Anonymous expressions fall back to the name provided by the
        // surrounding binding (`self.name`); the last flag records whether
        // the function had its own name.
        let function = AsyncFunction::new(name.or(self.name), params, body, name.is_some());

        // `super` is not allowed inside a function expression.
        if contains(&function, ContainsSymbol::Super) {
            return Err(Error::lex(LexError::Syntax(
                "invalid super usage".into(),
                params_start_position,
            )));
        }

        Ok(function)
    }
}
|
||||
@@ -0,0 +1,98 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
declaration::{Declaration, LexicalDeclaration, Variable},
|
||||
expression::literal::Literal,
|
||||
function::{AsyncFunction, FormalParameterList},
|
||||
statement::Return,
|
||||
Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks async expression parsing.
///
/// An anonymous async function bound by `const` must inherit the binding's
/// name (`add`), with the "has own name" flag set to `false`.
#[test]
fn check_async_expression() {
    let interner = &mut Interner::default();
    let add = interner.get_or_intern_static("add", utf16!("add"));
    check_parser(
        "const add = async function() {
            return 1;
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                add.into(),
                Some(
                    AsyncFunction::new(
                        Some(add.into()),
                        FormalParameterList::default(),
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(Literal::from(1).into())),
                        ))]
                        .into(),
                        false,
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks nested async function expressions: each anonymous function picks
/// up the name of the `const` binding it is assigned to.
#[test]
fn check_nested_async_expression() {
    let interner = &mut Interner::default();
    let a = interner.get_or_intern_static("a", utf16!("a"));
    let b = interner.get_or_intern_static("b", utf16!("b"));
    check_parser(
        "const a = async function() {
            const b = async function() {
                return 1;
            };
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                a.into(),
                Some(
                    AsyncFunction::new(
                        Some(a.into()),
                        FormalParameterList::default(),
                        // Body of the outer function: the inner `const b`.
                        vec![Declaration::Lexical(LexicalDeclaration::Const(
                            vec![Variable::from_identifier(
                                b.into(),
                                Some(
                                    AsyncFunction::new(
                                        Some(b.into()),
                                        FormalParameterList::default(),
                                        vec![Statement::Return(Return::new(Some(
                                            Literal::from(1).into(),
                                        )))
                                        .into()]
                                        .into(),
                                        false,
                                    )
                                    .into(),
                                ),
                            )]
                            .try_into()
                            .unwrap(),
                        ))
                        .into()]
                        .into(),
                        false,
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
@@ -0,0 +1,195 @@
|
||||
//! Async Generator Expression Parser
|
||||
//!
|
||||
//! Implements `TokenParser` for `AsyncGeneratorExpression` and outputs
|
||||
//! an `AsyncGeneratorExpr` ast node
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-AsyncGeneratorExpression
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::BindingIdentifier,
|
||||
function::{FormalParameters, FunctionBody},
|
||||
name_in_lexically_declared_names, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::Identifier,
|
||||
function::AsyncGenerator,
|
||||
operations::{bound_names, contains, top_level_lexically_declared_names, ContainsSymbol},
|
||||
Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Async Generator Expression Parsing
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-AsyncGeneratorExpression
#[derive(Debug, Clone, Copy)]
pub(super) struct AsyncGeneratorExpression {
    // Optional fallback binding name — presumably used to name anonymous
    // generator expressions like the sibling function-expression parsers;
    // confirm in the `parse` implementation.
    name: Option<Identifier>,
}
|
||||
|
||||
impl AsyncGeneratorExpression {
|
||||
/// Creates a new `AsyncGeneratorExpression` parser.
|
||||
pub(in crate::parser) fn new<N>(name: N) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
{
|
||||
Self { name: name.into() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for AsyncGeneratorExpression
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
//The below needs to be implemented in ast::node
|
||||
type Output = AsyncGenerator;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("AsyncGeneratorExpression", "Parsing");
|
||||
|
||||
cursor.peek_expect_no_lineterminator(0, "async generator expression", interner)?;
|
||||
cursor.expect(
|
||||
(Keyword::Function, false),
|
||||
"async generator expression",
|
||||
interner,
|
||||
)?;
|
||||
cursor.expect(
|
||||
TokenKind::Punctuator(Punctuator::Mul),
|
||||
"async generator expression",
|
||||
interner,
|
||||
)?;
|
||||
|
||||
let token = cursor.peek(0, interner).or_abrupt()?;
|
||||
let (name, name_span) = match token.kind() {
|
||||
TokenKind::Identifier(_)
|
||||
| TokenKind::Keyword((
|
||||
Keyword::Yield | Keyword::Await | Keyword::Async | Keyword::Of,
|
||||
_,
|
||||
)) => {
|
||||
let span = token.span();
|
||||
let name = BindingIdentifier::new(true, true).parse(cursor, interner)?;
|
||||
|
||||
(Some(name), span)
|
||||
}
|
||||
_ => (None, token.span()),
|
||||
};
|
||||
|
||||
let params_start_position = cursor
|
||||
.expect(
|
||||
Punctuator::OpenParen,
|
||||
"async generator expression",
|
||||
interner,
|
||||
)?
|
||||
.span()
|
||||
.end();
|
||||
|
||||
let params = FormalParameters::new(true, true).parse(cursor, interner)?;
|
||||
|
||||
// It is a Syntax Error if FormalParameters Contains YieldExpression is true.
|
||||
if contains(¶ms, ContainsSymbol::YieldExpression) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"yield expression not allowed in async generator expression parameters".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// It is a Syntax Error if FormalParameters Contains AwaitExpression is true.
|
||||
if contains(¶ms, ContainsSymbol::AwaitExpression) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"await expression not allowed in async generator expression parameters".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
cursor.expect(
|
||||
Punctuator::CloseParen,
|
||||
"async generator expression",
|
||||
interner,
|
||||
)?;
|
||||
cursor.expect(
|
||||
Punctuator::OpenBlock,
|
||||
"async generator expression",
|
||||
interner,
|
||||
)?;
|
||||
|
||||
let body = FunctionBody::new(true, true).parse(cursor, interner)?;
|
||||
|
||||
cursor.expect(
|
||||
Punctuator::CloseBlock,
|
||||
"async generator expression",
|
||||
interner,
|
||||
)?;
|
||||
|
||||
// Early Error: If the source code matching FormalParameters is strict mode code,
|
||||
// the Early Error rules for UniqueFormalParameters : FormalParameters are applied.
|
||||
if (cursor.strict_mode() || body.strict()) && params.has_duplicates() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Duplicate parameter name not allowed in this context".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// Early Error: It is a Syntax Error if FunctionBodyContainsUseStrict of GeneratorBody is true
|
||||
// and IsSimpleParameterList of FormalParameters is false.
|
||||
if body.strict() && !params.is_simple() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Illegal 'use strict' directive in function with non-simple parameter list".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// Early Error: If BindingIdentifier is present and the source code matching BindingIdentifier is strict mode code,
|
||||
// it is a Syntax Error if the StringValue of BindingIdentifier is "eval" or "arguments".
|
||||
if let Some(name) = name {
|
||||
if (cursor.strict_mode() || body.strict())
|
||||
&& [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym())
|
||||
{
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"unexpected identifier 'eval' or 'arguments' in strict mode".into(),
|
||||
name_span.start(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
// Catch early error for BindingIdentifier, because strictness of the functions body is also
|
||||
// relevant for the function parameters.
|
||||
if body.strict() && contains(¶ms, ContainsSymbol::EvalOrArguments) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"unexpected identifier 'eval' or 'arguments' in strict mode".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// It is a Syntax Error if any element of the BoundNames of FormalParameters
|
||||
// also occurs in the LexicallyDeclaredNames of FunctionBody.
|
||||
name_in_lexically_declared_names(
|
||||
&bound_names(¶ms),
|
||||
&top_level_lexically_declared_names(&body),
|
||||
params_start_position,
|
||||
)?;
|
||||
|
||||
let function = AsyncGenerator::new(name.or(self.name), params, body, name.is_some());
|
||||
|
||||
if contains(&function, ContainsSymbol::Super) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"invalid super usage".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(function)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,100 @@
|
||||
use std::convert::TryInto;
|
||||
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
declaration::{LexicalDeclaration, Variable},
|
||||
expression::literal::Literal,
|
||||
function::{AsyncGenerator, FormalParameterList},
|
||||
statement::Return,
|
||||
Declaration, Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
///checks async generator expression parsing
|
||||
|
||||
#[test]
|
||||
fn check_async_generator_expr() {
|
||||
let interner = &mut Interner::default();
|
||||
let add = interner.get_or_intern_static("add", utf16!("add"));
|
||||
check_parser(
|
||||
"const add = async function*(){
|
||||
return 1;
|
||||
};
|
||||
",
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
add.into(),
|
||||
Some(
|
||||
AsyncGenerator::new(
|
||||
Some(add.into()),
|
||||
FormalParameterList::default(),
|
||||
vec![StatementListItem::Statement(Statement::Return(
|
||||
Return::new(Some(Literal::from(1).into())),
|
||||
))]
|
||||
.into(),
|
||||
false,
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_nested_async_generator_expr() {
|
||||
let interner = &mut Interner::default();
|
||||
let a = interner.get_or_intern_static("a", utf16!("a"));
|
||||
let b = interner.get_or_intern_static("b", utf16!("b"));
|
||||
check_parser(
|
||||
"const a = async function*() {
|
||||
const b = async function*() {
|
||||
return 1;
|
||||
};
|
||||
};
|
||||
",
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
a.into(),
|
||||
Some(
|
||||
AsyncGenerator::new(
|
||||
Some(a.into()),
|
||||
FormalParameterList::default(),
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
b.into(),
|
||||
Some(
|
||||
AsyncGenerator::new(
|
||||
Some(b.into()),
|
||||
FormalParameterList::default(),
|
||||
vec![StatementListItem::Statement(Statement::Return(
|
||||
Return::new(Some(Literal::from(1).into())),
|
||||
))]
|
||||
.into(),
|
||||
false,
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()]
|
||||
.into(),
|
||||
false,
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
74
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/class_expression/mod.rs
vendored
Normal file
74
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/class_expression/mod.rs
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::BindingIdentifier, statement::ClassTail, AllowAwait, AllowYield, Cursor,
|
||||
OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
};
|
||||
use boa_ast::{expression::Identifier, function::Class, Keyword};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Class expression parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-ClassExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct ClassExpression {
|
||||
name: Option<Identifier>,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl ClassExpression {
|
||||
/// Creates a new `ClassExpression` parser.
|
||||
pub(in crate::parser) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ClassExpression
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Class;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("ClassExpression", "Parsing");
|
||||
let strict = cursor.strict_mode();
|
||||
cursor.set_strict_mode(true);
|
||||
|
||||
let mut has_binding_identifier = false;
|
||||
let token = cursor.peek(0, interner).or_abrupt()?;
|
||||
let name = match token.kind() {
|
||||
TokenKind::Identifier(_) | TokenKind::Keyword((Keyword::Yield | Keyword::Await, _)) => {
|
||||
has_binding_identifier = true;
|
||||
BindingIdentifier::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?
|
||||
.into()
|
||||
}
|
||||
_ => self.name,
|
||||
};
|
||||
cursor.set_strict_mode(strict);
|
||||
|
||||
ClassTail::new(
|
||||
name,
|
||||
has_binding_identifier,
|
||||
self.allow_yield,
|
||||
self.allow_await,
|
||||
)
|
||||
.parse(cursor, interner)
|
||||
}
|
||||
}
|
||||
154
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/function_expression/mod.rs
vendored
Normal file
154
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/function_expression/mod.rs
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
//! Function expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-FunctionExpression
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::BindingIdentifier,
|
||||
function::{FormalParameters, FunctionBody},
|
||||
name_in_lexically_declared_names, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::Identifier,
|
||||
function::Function,
|
||||
operations::{bound_names, contains, top_level_lexically_declared_names, ContainsSymbol},
|
||||
Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Function expression parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-FunctionExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct FunctionExpression {
|
||||
name: Option<Identifier>,
|
||||
}
|
||||
|
||||
impl FunctionExpression {
|
||||
/// Creates a new `FunctionExpression` parser.
|
||||
pub(in crate::parser) fn new<N>(name: N) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
{
|
||||
Self { name: name.into() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for FunctionExpression
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Function;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("FunctionExpression", "Parsing");
|
||||
|
||||
let token = cursor.peek(0, interner).or_abrupt()?;
|
||||
let (name, name_span) = match token.kind() {
|
||||
TokenKind::Identifier(_)
|
||||
| TokenKind::Keyword((
|
||||
Keyword::Yield | Keyword::Await | Keyword::Async | Keyword::Of,
|
||||
_,
|
||||
)) => {
|
||||
let span = token.span();
|
||||
let name = BindingIdentifier::new(false, false).parse(cursor, interner)?;
|
||||
|
||||
(Some(name), span)
|
||||
}
|
||||
_ => (None, token.span()),
|
||||
};
|
||||
|
||||
let params_start_position = cursor
|
||||
.expect(Punctuator::OpenParen, "function expression", interner)?
|
||||
.span()
|
||||
.end();
|
||||
|
||||
let params = FormalParameters::new(false, false).parse(cursor, interner)?;
|
||||
|
||||
cursor.expect(Punctuator::CloseParen, "function expression", interner)?;
|
||||
cursor.expect(Punctuator::OpenBlock, "function expression", interner)?;
|
||||
|
||||
let body = FunctionBody::new(false, false).parse(cursor, interner)?;
|
||||
|
||||
cursor.expect(Punctuator::CloseBlock, "function expression", interner)?;
|
||||
|
||||
// Early Error: If the source code matching FormalParameters is strict mode code,
|
||||
// the Early Error rules for UniqueFormalParameters : FormalParameters are applied.
|
||||
if (cursor.strict_mode() || body.strict()) && params.has_duplicates() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Duplicate parameter name not allowed in this context".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// Early Error: It is a Syntax Error if FunctionBodyContainsUseStrict of GeneratorBody is true
|
||||
// and IsSimpleParameterList of FormalParameters is false.
|
||||
if body.strict() && !params.is_simple() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Illegal 'use strict' directive in function with non-simple parameter list".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// Early Error: If BindingIdentifier is present and the source code matching BindingIdentifier is strict mode code,
|
||||
// it is a Syntax Error if the StringValue of BindingIdentifier is "eval" or "arguments".
|
||||
if let Some(name) = name {
|
||||
if (cursor.strict_mode() || body.strict())
|
||||
&& [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym())
|
||||
{
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"unexpected identifier 'eval' or 'arguments' in strict mode".into(),
|
||||
name_span.start(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
// Catch early error for BindingIdentifier, because strictness of the functions body is also
|
||||
// relevant for the function parameters.
|
||||
if body.strict() && contains(¶ms, ContainsSymbol::EvalOrArguments) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"unexpected identifier 'eval' or 'arguments' in strict mode".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// It is a Syntax Error if any element of the BoundNames of FormalParameters
|
||||
// also occurs in the LexicallyDeclaredNames of FunctionBody.
|
||||
// https://tc39.es/ecma262/#sec-function-definitions-static-semantics-early-errors
|
||||
name_in_lexically_declared_names(
|
||||
&bound_names(¶ms),
|
||||
&top_level_lexically_declared_names(&body),
|
||||
params_start_position,
|
||||
)?;
|
||||
|
||||
let function =
|
||||
Function::new_with_binding_identifier(name.or(self.name), params, body, name.is_some());
|
||||
|
||||
if contains(&function, ContainsSymbol::Super) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"invalid super usage".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(function)
|
||||
}
|
||||
}
|
||||
154
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/function_expression/tests.rs
vendored
Normal file
154
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/function_expression/tests.rs
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
declaration::{LexicalDeclaration, Variable},
|
||||
expression::literal::Literal,
|
||||
function::{FormalParameterList, Function},
|
||||
statement::Return,
|
||||
Declaration, Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks async expression parsing.
|
||||
#[test]
|
||||
fn check_function_expression() {
|
||||
let interner = &mut Interner::default();
|
||||
let add = interner.get_or_intern_static("add", utf16!("add"));
|
||||
check_parser(
|
||||
"const add = function() {
|
||||
return 1;
|
||||
};
|
||||
",
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
add.into(),
|
||||
Some(
|
||||
Function::new(
|
||||
Some(add.into()),
|
||||
FormalParameterList::default(),
|
||||
vec![StatementListItem::Statement(Statement::Return(
|
||||
Return::new(Some(Literal::from(1).into())),
|
||||
))]
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_nested_function_expression() {
|
||||
let interner = &mut Interner::default();
|
||||
let a = interner.get_or_intern_static("a", utf16!("a"));
|
||||
let b = interner.get_or_intern_static("b", utf16!("b"));
|
||||
check_parser(
|
||||
"const a = function() {
|
||||
const b = function() {
|
||||
return 1;
|
||||
};
|
||||
};
|
||||
",
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
a.into(),
|
||||
Some(
|
||||
Function::new(
|
||||
Some(a.into()),
|
||||
FormalParameterList::default(),
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
b.into(),
|
||||
Some(
|
||||
Function::new(
|
||||
Some(b.into()),
|
||||
FormalParameterList::default(),
|
||||
vec![StatementListItem::Statement(Statement::Return(
|
||||
Return::new(Some(Literal::from(1).into())),
|
||||
))]
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()]
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_function_non_reserved_keyword() {
|
||||
macro_rules! genast {
|
||||
($keyword:literal, $interner:expr) => {
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
$interner.get_or_intern_static("add", utf16!("add")).into(),
|
||||
Some(
|
||||
Function::new_with_binding_identifier(
|
||||
Some($interner.get_or_intern_static($keyword, utf16!($keyword)).into()),
|
||||
FormalParameterList::default(),
|
||||
vec![StatementListItem::Statement(Statement::Return(Return::new(Some(Literal::from(1).into()))))].into(),
|
||||
true,
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into().unwrap(),
|
||||
))
|
||||
.into()]
|
||||
};
|
||||
}
|
||||
|
||||
let interner = &mut Interner::default();
|
||||
let ast = genast!("as", interner);
|
||||
check_parser("const add = function as() { return 1; };", ast, interner);
|
||||
|
||||
let interner = &mut Interner::default();
|
||||
let ast = genast!("async", interner);
|
||||
check_parser("const add = function async() { return 1; };", ast, interner);
|
||||
|
||||
let interner = &mut Interner::default();
|
||||
let ast = genast!("from", interner);
|
||||
check_parser("const add = function from() { return 1; };", ast, interner);
|
||||
|
||||
let interner = &mut Interner::default();
|
||||
let ast = genast!("get", interner);
|
||||
check_parser("const add = function get() { return 1; };", ast, interner);
|
||||
|
||||
let interner = &mut Interner::default();
|
||||
let ast = genast!("meta", interner);
|
||||
check_parser("const add = function meta() { return 1; };", ast, interner);
|
||||
|
||||
let interner = &mut Interner::default();
|
||||
let ast = genast!("of", interner);
|
||||
check_parser("const add = function of() { return 1; };", ast, interner);
|
||||
|
||||
let interner = &mut Interner::default();
|
||||
let ast = genast!("set", interner);
|
||||
check_parser("const add = function set() { return 1; };", ast, interner);
|
||||
|
||||
let interner = &mut Interner::default();
|
||||
let ast = genast!("target", interner);
|
||||
check_parser(
|
||||
"const add = function target() { return 1; };",
|
||||
ast,
|
||||
interner,
|
||||
);
|
||||
}
|
||||
170
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/generator_expression/mod.rs
vendored
Normal file
170
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/generator_expression/mod.rs
vendored
Normal file
@@ -0,0 +1,170 @@
|
||||
//! Generator expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function*
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-GeneratorExpression
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::BindingIdentifier,
|
||||
function::{FormalParameters, FunctionBody},
|
||||
name_in_lexically_declared_names, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::Identifier,
|
||||
function::Generator,
|
||||
operations::{bound_names, contains, top_level_lexically_declared_names, ContainsSymbol},
|
||||
Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Generator expression parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function*
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-GeneratorExpression
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct GeneratorExpression {
|
||||
name: Option<Identifier>,
|
||||
}
|
||||
|
||||
impl GeneratorExpression {
|
||||
/// Creates a new `GeneratorExpression` parser.
|
||||
pub(in crate::parser) fn new<N>(name: N) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
{
|
||||
Self { name: name.into() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for GeneratorExpression
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Generator;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("GeneratorExpression", "Parsing");
|
||||
|
||||
cursor.expect(
|
||||
TokenKind::Punctuator(Punctuator::Mul),
|
||||
"generator expression",
|
||||
interner,
|
||||
)?;
|
||||
|
||||
let token = cursor.peek(0, interner).or_abrupt()?;
|
||||
let (name, name_span) = match token.kind() {
|
||||
TokenKind::Identifier(_)
|
||||
| TokenKind::Keyword((
|
||||
Keyword::Yield | Keyword::Await | Keyword::Async | Keyword::Of,
|
||||
_,
|
||||
)) => {
|
||||
let span = token.span();
|
||||
let name = BindingIdentifier::new(true, false).parse(cursor, interner)?;
|
||||
|
||||
(Some(name), span)
|
||||
}
|
||||
_ => (None, token.span()),
|
||||
};
|
||||
|
||||
let params_start_position = cursor
|
||||
.expect(Punctuator::OpenParen, "generator expression", interner)?
|
||||
.span()
|
||||
.end();
|
||||
|
||||
let params = FormalParameters::new(true, false).parse(cursor, interner)?;
|
||||
|
||||
cursor.expect(Punctuator::CloseParen, "generator expression", interner)?;
|
||||
cursor.expect(Punctuator::OpenBlock, "generator expression", interner)?;
|
||||
|
||||
let body = FunctionBody::new(true, false).parse(cursor, interner)?;
|
||||
|
||||
cursor.expect(Punctuator::CloseBlock, "generator expression", interner)?;
|
||||
|
||||
// If the source text matched by FormalParameters is strict mode code,
|
||||
// the Early Error rules for UniqueFormalParameters : FormalParameters are applied.
|
||||
// https://tc39.es/ecma262/#sec-generator-function-definitions-static-semantics-early-errors
|
||||
if (cursor.strict_mode() || body.strict()) && params.has_duplicates() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Duplicate parameter name not allowed in this context".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// It is a Syntax Error if FunctionBodyContainsUseStrict of GeneratorBody is true
|
||||
// and IsSimpleParameterList of FormalParameters is false.
|
||||
// https://tc39.es/ecma262/#sec-generator-function-definitions-static-semantics-early-errors
|
||||
if body.strict() && !params.is_simple() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"Illegal 'use strict' directive in function with non-simple parameter list".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// Early Error: If BindingIdentifier is present and the source code matching BindingIdentifier is strict mode code,
|
||||
// it is a Syntax Error if the StringValue of BindingIdentifier is "eval" or "arguments".
|
||||
if let Some(name) = name {
|
||||
if (cursor.strict_mode() || body.strict())
|
||||
&& [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym())
|
||||
{
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"unexpected identifier 'eval' or 'arguments' in strict mode".into(),
|
||||
name_span.start(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
// Catch early error for BindingIdentifier, because strictness of the functions body is also
|
||||
// relevant for the function parameters.
|
||||
if body.strict() && contains(¶ms, ContainsSymbol::EvalOrArguments) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"unexpected identifier 'eval' or 'arguments' in strict mode".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
// It is a Syntax Error if any element of the BoundNames of FormalParameters
|
||||
// also occurs in the LexicallyDeclaredNames of GeneratorBody.
|
||||
// https://tc39.es/ecma262/#sec-generator-function-definitions-static-semantics-early-errors
|
||||
name_in_lexically_declared_names(
|
||||
&bound_names(¶ms),
|
||||
&top_level_lexically_declared_names(&body),
|
||||
params_start_position,
|
||||
)?;
|
||||
|
||||
// It is a Syntax Error if FormalParameters Contains YieldExpression is true.
|
||||
// https://tc39.es/ecma262/#sec-generator-function-definitions-static-semantics-early-errors
|
||||
if contains(¶ms, ContainsSymbol::YieldExpression) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"generator expression cannot contain yield expression in parameters".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
let function = Generator::new(name.or(self.name), params, body, name.is_some());
|
||||
|
||||
if contains(&function, ContainsSymbol::Super) {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"invalid super usage".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(function)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
declaration::{LexicalDeclaration, Variable},
|
||||
expression::{literal::Literal, Yield},
|
||||
function::{FormalParameterList, Generator},
|
||||
Declaration, Expression, Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
#[test]
|
||||
fn check_generator_function_expression() {
|
||||
let interner = &mut Interner::default();
|
||||
let gen = interner.get_or_intern_static("gen", utf16!("gen"));
|
||||
check_parser(
|
||||
"const gen = function*() {
|
||||
yield 1;
|
||||
};
|
||||
",
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
gen.into(),
|
||||
Some(
|
||||
Generator::new(
|
||||
Some(gen.into()),
|
||||
FormalParameterList::default(),
|
||||
vec![StatementListItem::Statement(Statement::Expression(
|
||||
Expression::from(Yield::new(Some(Literal::from(1).into()), false)),
|
||||
))]
|
||||
.into(),
|
||||
false,
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_generator_function_delegate_yield_expression() {
|
||||
let interner = &mut Interner::default();
|
||||
let gen = interner.get_or_intern_static("gen", utf16!("gen"));
|
||||
check_parser(
|
||||
"const gen = function*() {
|
||||
yield* 1;
|
||||
};
|
||||
",
|
||||
vec![Declaration::Lexical(LexicalDeclaration::Const(
|
||||
vec![Variable::from_identifier(
|
||||
gen.into(),
|
||||
Some(
|
||||
Generator::new(
|
||||
Some(gen.into()),
|
||||
FormalParameterList::default(),
|
||||
vec![StatementListItem::Statement(Statement::Expression(
|
||||
Expression::from(Yield::new(Some(Literal::from(1).into()), true)),
|
||||
))]
|
||||
.into(),
|
||||
false,
|
||||
)
|
||||
.into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
648
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/mod.rs
vendored
Normal file
648
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/mod.rs
vendored
Normal file
@@ -0,0 +1,648 @@
|
||||
//! Primary expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators#Primary_expressions
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-PrimaryExpression
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
mod array_initializer;
|
||||
mod async_function_expression;
|
||||
mod async_generator_expression;
|
||||
mod class_expression;
|
||||
mod function_expression;
|
||||
mod generator_expression;
|
||||
mod template;
|
||||
|
||||
pub(in crate::parser) mod object_initializer;
|
||||
|
||||
use self::{
|
||||
array_initializer::ArrayLiteral, async_function_expression::AsyncFunctionExpression,
|
||||
async_generator_expression::AsyncGeneratorExpression, class_expression::ClassExpression,
|
||||
function_expression::FunctionExpression, generator_expression::GeneratorExpression,
|
||||
object_initializer::ObjectLiteral,
|
||||
};
|
||||
use crate::{
|
||||
lexer::{token::Numeric, InputElement, Token, TokenKind},
|
||||
parser::{
|
||||
expression::{
|
||||
identifiers::IdentifierReference, primary::template::TemplateLiteral,
|
||||
BindingIdentifier, Expression,
|
||||
},
|
||||
statement::{ArrayBindingPattern, ObjectBindingPattern},
|
||||
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
declaration::Variable,
|
||||
expression::{
|
||||
literal::Literal,
|
||||
operator::{assign::AssignTarget, binary::BinaryOp},
|
||||
Call, Identifier, New,
|
||||
},
|
||||
function::{FormalParameter, FormalParameterList},
|
||||
operations::{contains, ContainsSymbol},
|
||||
pattern::{ArrayPatternElement, ObjectPatternElement, Pattern},
|
||||
Keyword, Punctuator, Span,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
pub(in crate::parser) use object_initializer::Initializer;
|
||||
|
||||
/// Parses a primary expression.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Expressions_and_Operators#Primary_expressions
/// [spec]: https://tc39.es/ecma262/#prod-PrimaryExpression
#[derive(Debug, Clone, Copy)]
pub(super) struct PrimaryExpression {
    /// Name given to a function/class expression parsed from here; `None` when anonymous.
    name: Option<Identifier>,
    /// `[Yield]` grammar parameter: whether `yield` is currently a keyword or an identifier.
    allow_yield: AllowYield,
    /// `[Await]` grammar parameter: whether `await` is currently a keyword or an identifier.
    allow_await: AllowAwait,
}

impl PrimaryExpression {
    /// Creates a new `PrimaryExpression` parser.
    pub(super) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
    where
        N: Into<Option<Identifier>>,
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
    {
        Self {
            name: name.into(),
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
        }
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for PrimaryExpression
where
    R: Read,
{
    type Output = ast::Expression;

    /// Dispatches on the first token to parse a single `PrimaryExpression` production.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("PrimaryExpression", "Parsing");

        // TODO: tok currently consumes the token instead of peeking, so the token
        // isn't passed and consumed by parsers according to spec (EX: GeneratorExpression)
        let tok = cursor.peek(0, interner).or_abrupt()?;
        let tok_position = tok.span().start();

        match tok.kind() {
            // `this` spelled with escape sequences (the `true` flag) is a syntax error.
            TokenKind::Keyword((Keyword::This, true)) => Err(Error::general(
                "Keyword must not contain escaped characters",
                tok_position,
            )),
            TokenKind::Keyword((Keyword::This, false)) => {
                cursor.advance(interner);
                Ok(ast::Expression::This)
            }
            // `function` begins either a generator (`function*`) or a plain function expression.
            TokenKind::Keyword((Keyword::Function, _)) => {
                cursor.advance(interner);
                let next_token = cursor.peek(0, interner).or_abrupt()?;
                if next_token.kind() == &TokenKind::Punctuator(Punctuator::Mul) {
                    GeneratorExpression::new(self.name)
                        .parse(cursor, interner)
                        .map(Into::into)
                } else {
                    FunctionExpression::new(self.name)
                        .parse(cursor, interner)
                        .map(Into::into)
                }
            }
            TokenKind::Keyword((Keyword::Class, _)) => {
                cursor.advance(interner);
                ClassExpression::new(self.name, self.allow_yield, self.allow_await)
                    .parse(cursor, interner)
                    .map(Into::into)
            }
            // `async` may start an async function/generator expression, or be a plain
            // identifier reference when not followed by `function`.
            TokenKind::Keyword((Keyword::Async, contain_escaped_char)) => {
                let contain_escaped_char = *contain_escaped_char;
                match cursor.peek(1, interner)?.map(Token::kind) {
                    // Escaped `async` is only an error when it actually heads an
                    // async function; as an identifier it is fine.
                    Some(TokenKind::Keyword((Keyword::Function, _))) if contain_escaped_char => {
                        Err(Error::general(
                            "Keyword must not contain escaped characters",
                            tok_position,
                        ))
                    }
                    Some(TokenKind::Keyword((Keyword::Function, _))) => {
                        cursor.advance(interner);
                        // peek(0) is now `function`; peek(1) checks for `*`.
                        match cursor.peek(1, interner)?.map(Token::kind) {
                            Some(TokenKind::Punctuator(Punctuator::Mul)) => {
                                AsyncGeneratorExpression::new(self.name)
                                    .parse(cursor, interner)
                                    .map(Into::into)
                            }
                            _ => AsyncFunctionExpression::new(self.name)
                                .parse(cursor, interner)
                                .map(Into::into),
                        }
                    }
                    _ => IdentifierReference::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)
                        .map(Into::into),
                }
            }
            // `(` enters the cover grammar shared by parenthesized expressions and
            // arrow-function parameter lists.
            TokenKind::Punctuator(Punctuator::OpenParen) => {
                cursor.advance(interner);
                // RegExp goal: a following `/` lexes as a regex literal, not division.
                cursor.set_goal(InputElement::RegExp);
                let expr = CoverParenthesizedExpressionAndArrowParameterList::new(
                    self.name,
                    self.allow_yield,
                    self.allow_await,
                )
                .parse(cursor, interner)?;
                Ok(expr)
            }
            TokenKind::Punctuator(Punctuator::OpenBracket) => {
                cursor.advance(interner);
                cursor.set_goal(InputElement::RegExp);
                ArrayLiteral::new(self.allow_yield, self.allow_await)
                    .parse(cursor, interner)
                    .map(Into::into)
            }
            TokenKind::Punctuator(Punctuator::OpenBlock) => {
                cursor.advance(interner);
                cursor.set_goal(InputElement::RegExp);
                ObjectLiteral::new(self.allow_yield, self.allow_await)
                    .parse(cursor, interner)
                    .map(Into::into)
            }
            TokenKind::BooleanLiteral(boolean) => {
                let node = Literal::from(*boolean).into();
                cursor.advance(interner);
                Ok(node)
            }
            TokenKind::NullLiteral => {
                cursor.advance(interner);
                Ok(Literal::Null.into())
            }
            // `let`, `yield`, `await` and `of` are contextual keywords that can still
            // serve as identifier references in a primary expression.
            TokenKind::Identifier(_)
            | TokenKind::Keyword((
                Keyword::Let | Keyword::Yield | Keyword::Await | Keyword::Of,
                _,
            )) => IdentifierReference::new(self.allow_yield, self.allow_await)
                .parse(cursor, interner)
                .map(Into::into),
            TokenKind::StringLiteral(lit) => {
                let node = Literal::from(*lit).into();
                cursor.advance(interner);
                Ok(node)
            }
            // A template with no `${}` substitutions reduces to a plain string literal.
            TokenKind::TemplateNoSubstitution(template_string) => {
                let node = Literal::from(
                    template_string
                        .to_owned_cooked(interner)
                        .map_err(Error::lex)?,
                )
                .into();
                cursor.advance(interner);
                Ok(node)
            }
            TokenKind::NumericLiteral(Numeric::Integer(num)) => {
                let node = Literal::from(*num).into();
                cursor.advance(interner);
                Ok(node)
            }
            TokenKind::NumericLiteral(Numeric::Rational(num)) => {
                let node = Literal::from(*num).into();
                cursor.advance(interner);
                Ok(node)
            }
            TokenKind::NumericLiteral(Numeric::BigInt(num)) => {
                let node = Literal::from(num.clone()).into();
                cursor.advance(interner);
                Ok(node)
            }
            // A regex literal is represented as `new RegExp(body, flags)` in the AST.
            TokenKind::RegularExpressionLiteral(body, flags) => {
                let node = ast::Expression::from(New::from(Call::new(
                    Identifier::new(Sym::REGEXP).into(),
                    vec![Literal::from(*body).into(), Literal::from(*flags).into()].into(),
                )));
                cursor.advance(interner);
                Ok(node)
            }
            // A leading `/` was lexed as division under the wrong goal; re-lex it as
            // a regex literal starting at this position.
            TokenKind::Punctuator(Punctuator::Div) => {
                let position = tok.span().start();
                cursor.advance(interner);
                let tok = cursor.lex_regex(position, interner)?;

                if let TokenKind::RegularExpressionLiteral(body, flags) = *tok.kind() {
                    Ok(ast::Expression::from(New::from(Call::new(
                        Identifier::new(Sym::REGEXP).into(),
                        vec![Literal::from(body).into(), Literal::from(flags).into()].into(),
                    ))))
                } else {
                    // A regex was expected and nothing else.
                    Err(Error::unexpected(
                        tok.to_string(interner),
                        tok.span(),
                        "regular expression literal",
                    ))
                }
            }
            // Head of a template with substitutions; delegate to the template parser.
            TokenKind::TemplateMiddle(template_string) => {
                let parser = TemplateLiteral::new(
                    self.allow_yield,
                    self.allow_await,
                    tok.span().start(),
                    template_string
                        .to_owned_cooked(interner)
                        .map_err(Error::lex)?,
                );
                cursor.advance(interner);
                parser.parse(cursor, interner).map(Into::into)
            }
            _ => Err(Error::unexpected(
                tok.to_string(interner),
                tok.span(),
                "primary expression",
            )),
        }
    }
}
|
||||
|
||||
/// Parses a `CoverParenthesizedExpressionAndArrowParameterList` expression.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-CoverParenthesizedExpressionAndArrowParameterList
#[derive(Debug, Clone, Copy)]
pub(super) struct CoverParenthesizedExpressionAndArrowParameterList {
    /// Name forwarded to any named expression parsed inside the parentheses.
    name: Option<Identifier>,
    /// `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    /// `[Await]` grammar parameter.
    allow_await: AllowAwait,
}

impl CoverParenthesizedExpressionAndArrowParameterList {
    /// Creates a new `CoverParenthesizedExpressionAndArrowParameterList` parser.
    pub(super) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
    where
        N: Into<Option<Identifier>>,
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
    {
        Self {
            name: name.into(),
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
        }
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for CoverParenthesizedExpressionAndArrowParameterList
where
    R: Read,
{
    type Output = ast::Expression;

    /// Parses the contents of `( ... )`, then disambiguates: if the next token is
    /// `=>` (with no intervening line terminator) the contents become an arrow
    /// function's formal parameter list; otherwise they must form exactly one
    /// parenthesized expression.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        // Everything the cover grammar can hold before we know which
        // interpretation (expression vs. parameter list) applies.
        #[derive(Debug)]
        enum InnerExpression {
            Expression(ast::Expression),
            SpreadObject(Vec<ObjectPatternElement>),
            SpreadArray(Vec<ArrayPatternElement>),
            SpreadBinding(Identifier),
        }

        let _timer = Profiler::global().start_event(
            "CoverParenthesizedExpressionAndArrowParameterList",
            "Parsing",
        );

        let start_span = cursor.peek(0, interner).or_abrupt()?.span();

        let mut expressions = Vec::new();
        // Span of a trailing `,` before `)`, if any; legal only for arrow parameters.
        let mut tailing_comma = None;

        let next = cursor.peek(0, interner).or_abrupt()?;
        let span = match next.kind() {
            // `()` — empty; only valid as an arrow parameter list.
            TokenKind::Punctuator(Punctuator::CloseParen) => {
                let span = next.span();
                cursor.advance(interner);
                span
            }
            // `(...x)` / `(...{..})` / `(...[..])` — a leading rest element.
            TokenKind::Punctuator(Punctuator::Spread) => {
                cursor.advance(interner);
                let next = cursor.peek(0, interner).or_abrupt()?;
                match next.kind() {
                    TokenKind::Punctuator(Punctuator::OpenBlock) => {
                        let bindings =
                            ObjectBindingPattern::new(self.allow_yield, self.allow_await)
                                .parse(cursor, interner)?;
                        expressions.push(InnerExpression::SpreadObject(bindings));
                    }
                    TokenKind::Punctuator(Punctuator::OpenBracket) => {
                        let bindings = ArrayBindingPattern::new(self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?;
                        expressions.push(InnerExpression::SpreadArray(bindings));
                    }
                    _ => {
                        let binding = BindingIdentifier::new(self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?;
                        expressions.push(InnerExpression::SpreadBinding(binding));
                    }
                }

                // A rest element must be the last thing before `)`.
                cursor
                    .expect(
                        Punctuator::CloseParen,
                        "CoverParenthesizedExpressionAndArrowParameterList",
                        interner,
                    )?
                    .span()
            }
            // Anything else: a (possibly comma-joined) expression, optionally
            // followed by `, ...rest` or a trailing comma.
            _ => {
                let expression =
                    Expression::new(self.name, true, self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                expressions.push(InnerExpression::Expression(expression));

                let next = cursor.peek(0, interner).or_abrupt()?;
                match next.kind() {
                    TokenKind::Punctuator(Punctuator::CloseParen) => {
                        let span = next.span();
                        cursor.advance(interner);
                        span
                    }
                    TokenKind::Punctuator(Punctuator::Comma) => {
                        cursor.advance(interner);
                        let next = cursor.peek(0, interner).or_abrupt()?;
                        match next.kind() {
                            // Trailing comma: `(a,)` — remember its span for later
                            // validation, since it is only legal for arrow params.
                            TokenKind::Punctuator(Punctuator::CloseParen) => {
                                let span = next.span();
                                tailing_comma = Some(next.span());
                                cursor.advance(interner);
                                span
                            }
                            // `(a, ...rest)` — rest element after the expression.
                            TokenKind::Punctuator(Punctuator::Spread) => {
                                cursor.advance(interner);
                                let next = cursor.peek(0, interner).or_abrupt()?;
                                match next.kind() {
                                    TokenKind::Punctuator(Punctuator::OpenBlock) => {
                                        let bindings = ObjectBindingPattern::new(
                                            self.allow_yield,
                                            self.allow_await,
                                        )
                                        .parse(cursor, interner)?;
                                        expressions.push(InnerExpression::SpreadObject(bindings));
                                    }
                                    TokenKind::Punctuator(Punctuator::OpenBracket) => {
                                        let bindings = ArrayBindingPattern::new(
                                            self.allow_yield,
                                            self.allow_await,
                                        )
                                        .parse(cursor, interner)?;
                                        expressions.push(InnerExpression::SpreadArray(bindings));
                                    }
                                    _ => {
                                        let binding = BindingIdentifier::new(
                                            self.allow_yield,
                                            self.allow_await,
                                        )
                                        .parse(cursor, interner)?;
                                        expressions.push(InnerExpression::SpreadBinding(binding));
                                    }
                                }

                                cursor
                                    .expect(
                                        Punctuator::CloseParen,
                                        "CoverParenthesizedExpressionAndArrowParameterList",
                                        interner,
                                    )?
                                    .span()
                            }
                            _ => {
                                return Err(Error::expected(
                                    vec![")".to_owned(), "...".to_owned()],
                                    next.kind().to_string(interner),
                                    next.span(),
                                    "CoverParenthesizedExpressionAndArrowParameterList",
                                ))
                            }
                        }
                    }
                    _ => {
                        return Err(Error::expected(
                            vec![")".to_owned(), ",".to_owned()],
                            next.kind().to_string(interner),
                            next.span(),
                            "CoverParenthesizedExpressionAndArrowParameterList",
                        ))
                    }
                }
            }
        };

        // `=>` on the same line means arrow function; a line terminator before
        // `=>` forces the parenthesized-expression interpretation.
        let is_arrow = if cursor.peek(0, interner)?.map(Token::kind)
            == Some(&TokenKind::Punctuator(Punctuator::Arrow))
        {
            !cursor.peek_is_line_terminator(0, interner).or_abrupt()?
        } else {
            false
        };

        // If the next token is not an arrow, we know that we must parse a parenthesized expression.
        if !is_arrow {
            if let Some(span) = tailing_comma {
                return Err(Error::unexpected(
                    Punctuator::Comma,
                    span,
                    "trailing comma in parenthesized expression",
                ));
            }
            if expressions.is_empty() {
                return Err(Error::unexpected(
                    Punctuator::CloseParen,
                    span,
                    "empty parenthesized expression",
                ));
            }
            if expressions.len() != 1 {
                return Err(Error::unexpected(
                    Punctuator::CloseParen,
                    span,
                    "multiple expressions in parenthesized expression",
                ));
            }
            if let InnerExpression::Expression(expression) = &expressions[0] {
                return Ok(expression.clone());
            }
            // Rest/spread elements have no meaning outside a parameter list.
            return Err(Error::unexpected(
                Punctuator::CloseParen,
                span,
                "parenthesized expression with spread expressions",
            ));
        }

        // We know that we must parse an arrow function.
        // We parse the expressions in to a parameter list.

        let mut parameters = Vec::new();

        for expression in expressions {
            match expression {
                InnerExpression::Expression(node) => {
                    expression_to_formal_parameters(
                        &node,
                        &mut parameters,
                        cursor.strict_mode(),
                        start_span,
                    )?;
                }
                InnerExpression::SpreadObject(bindings) => {
                    let declaration = Variable::from_pattern(bindings.into(), None);
                    let parameter = FormalParameter::new(declaration, true);
                    parameters.push(parameter);
                }
                InnerExpression::SpreadArray(bindings) => {
                    let declaration = Variable::from_pattern(bindings.into(), None);
                    let parameter = FormalParameter::new(declaration, true);
                    parameters.push(parameter);
                }
                InnerExpression::SpreadBinding(ident) => {
                    let declaration = Variable::from_identifier(ident, None);
                    let parameter = FormalParameter::new(declaration, true);
                    parameters.push(parameter);
                }
            }
        }

        let parameters = FormalParameterList::from(parameters);

        // A trailing comma after a rest parameter (`(...x,)`) is invalid.
        if let Some(span) = tailing_comma {
            if parameters.has_rest_parameter() {
                return Err(Error::general(
                    "rest parameter must be last formal parameter",
                    span.start(),
                ));
            }
        }

        if contains(&parameters, ContainsSymbol::YieldExpression) {
            return Err(Error::general(
                "yield expression is not allowed in formal parameter list of arrow function",
                start_span.start(),
            ));
        }

        Ok(ast::Expression::FormalParameterList(parameters))
    }
}
|
||||
|
||||
/// Convert an expression to a formal parameter and append it to the given parameter list.
///
/// Used when a cover grammar turned out to be an arrow parameter list: each
/// parsed expression is reinterpreted as a binding. Comma expressions recurse
/// into both operands, assignments become parameters with default values, and
/// object/array literals are converted to destructuring patterns. Any
/// expression that cannot act as a binding is rejected.
fn expression_to_formal_parameters(
    node: &ast::Expression,
    parameters: &mut Vec<FormalParameter>,
    strict: bool,
    span: Span,
) -> ParseResult<()> {
    match node {
        // Strict mode forbids `eval` and `arguments` as binding names.
        ast::Expression::Identifier(identifier) if strict && *identifier == Sym::EVAL => {
            return Err(Error::general(
                "parameter name 'eval' not allowed in strict mode",
                span.start(),
            ));
        }
        ast::Expression::Identifier(identifier) if strict && *identifier == Sym::ARGUMENTS => {
            return Err(Error::general(
                "parameter name 'arguments' not allowed in strict mode",
                span.start(),
            ));
        }
        ast::Expression::Identifier(identifier) => {
            parameters.push(FormalParameter::new(
                Variable::from_identifier(*identifier, None),
                false,
            ));
        }
        // `(a, b)` parses as a comma expression; each operand is a parameter.
        ast::Expression::Binary(bin_op) if bin_op.op() == BinaryOp::Comma => {
            expression_to_formal_parameters(bin_op.lhs(), parameters, strict, span)?;
            expression_to_formal_parameters(bin_op.rhs(), parameters, strict, span)?;
        }
        // `(a = 1)` — an assignment becomes a parameter with a default value.
        ast::Expression::Assign(assign) => match assign.lhs() {
            AssignTarget::Identifier(ident) => {
                parameters.push(FormalParameter::new(
                    Variable::from_identifier(*ident, Some(assign.rhs().clone())),
                    false,
                ));
            }
            AssignTarget::Pattern(pattern) => match pattern {
                Pattern::Object(pattern) => {
                    parameters.push(FormalParameter::new(
                        Variable::from_pattern(
                            pattern.bindings().to_vec().into(),
                            Some(assign.rhs().clone()),
                        ),
                        false,
                    ));
                }
                Pattern::Array(pattern) => {
                    parameters.push(FormalParameter::new(
                        Variable::from_pattern(
                            pattern.bindings().to_vec().into(),
                            Some(assign.rhs().clone()),
                        ),
                        false,
                    ));
                }
            },
            // Property/member accesses (`(a.b = 1) => ...`) cannot bind.
            AssignTarget::Access(_) => {
                return Err(Error::general(
                    "invalid initialization expression in formal parameter list",
                    span.start(),
                ));
            }
        },
        // `({a})` — reinterpret the object literal as a destructuring pattern.
        ast::Expression::ObjectLiteral(object) => {
            let pattern = object.to_pattern(strict).ok_or_else(|| {
                Error::general(
                    "invalid object binding pattern in formal parameter list",
                    span.start(),
                )
            })?;

            parameters.push(FormalParameter::new(
                Variable::from_pattern(pattern.into(), None),
                false,
            ));
        }
        // `([a])` — reinterpret the array literal as a destructuring pattern.
        ast::Expression::ArrayLiteral(array) => {
            let pattern = array.to_pattern(strict).ok_or_else(|| {
                Error::general(
                    "invalid array binding pattern in formal parameter list",
                    span.start(),
                )
            })?;

            parameters.push(FormalParameter::new(
                Variable::from_pattern(pattern.into(), None),
                false,
            ));
        }
        _ => {
            return Err(Error::unexpected(
                ")".to_string(),
                span,
                "parenthesized expression with non-binding expression",
            ));
        }
    }
    Ok(())
}
|
||||
1039
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/object_initializer/mod.rs
vendored
Normal file
1039
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/object_initializer/mod.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
568
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/object_initializer/tests.rs
vendored
Normal file
568
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/object_initializer/tests.rs
vendored
Normal file
@@ -0,0 +1,568 @@
|
||||
use crate::parser::tests::{check_invalid, check_parser};
|
||||
use boa_ast::{
|
||||
declaration::{LexicalDeclaration, Variable},
|
||||
expression::{
|
||||
literal::{Literal, ObjectLiteral},
|
||||
Identifier,
|
||||
},
|
||||
function::{
|
||||
AsyncFunction, AsyncGenerator, FormalParameter, FormalParameterList,
|
||||
FormalParameterListFlags, Function,
|
||||
},
|
||||
property::{MethodDefinition, PropertyDefinition, PropertyName},
|
||||
Declaration, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks object literal parsing.
#[test]
fn check_object_literal() {
    let interner = &mut Interner::default();

    // Expected AST: `{ a: true, b: false }` as two plain data properties.
    let object_properties = vec![
        PropertyDefinition::Property(
            interner.get_or_intern_static("a", utf16!("a")).into(),
            Literal::from(true).into(),
        ),
        PropertyDefinition::Property(
            interner.get_or_intern_static("b", utf16!("b")).into(),
            Literal::from(false).into(),
        ),
    ];

    check_parser(
        "const x = {
            a: true,
            b: false,
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Tests short function syntax.
#[test]
fn check_object_short_function() {
    let interner = &mut Interner::default();

    // `b() {}` should produce an ordinary method named `b` with no parameters.
    let object_properties = vec![
        PropertyDefinition::Property(
            interner.get_or_intern_static("a", utf16!("a")).into(),
            Literal::from(true).into(),
        ),
        PropertyDefinition::MethodDefinition(
            interner.get_or_intern_static("b", utf16!("b")).into(),
            MethodDefinition::Ordinary(Function::new(
                Some(interner.get_or_intern_static("b", utf16!("b")).into()),
                FormalParameterList::default(),
                StatementList::default(),
            )),
        ),
    ];

    check_parser(
        "const x = {
            a: true,
            b() {},
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Testing short function syntax with arguments.
#[test]
fn check_object_short_function_arguments() {
    let interner = &mut Interner::default();

    // One simple (non-rest, no-default) parameter named `test`.
    let parameters = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(
            interner.get_or_intern_static("test", utf16!("test")).into(),
            None,
        ),
        false,
    ));

    // A single simple parameter leaves the flags at their default and counts as length 1.
    assert_eq!(parameters.flags(), FormalParameterListFlags::default());
    assert_eq!(parameters.length(), 1);

    let object_properties = vec![
        PropertyDefinition::Property(
            interner.get_or_intern_static("a", utf16!("a")).into(),
            Literal::from(true).into(),
        ),
        PropertyDefinition::MethodDefinition(
            interner.get_or_intern_static("b", utf16!("b")).into(),
            MethodDefinition::Ordinary(Function::new(
                Some(interner.get_or_intern_static("b", utf16!("b")).into()),
                parameters,
                StatementList::default(),
            )),
        ),
    ];

    check_parser(
        "const x = {
            a: true,
            b(test) {}
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that `get b() {}` parses as a getter method definition.
#[test]
fn check_object_getter() {
    let interner = &mut Interner::default();

    let object_properties = vec![
        PropertyDefinition::Property(
            interner.get_or_intern_static("a", utf16!("a")).into(),
            Literal::from(true).into(),
        ),
        PropertyDefinition::MethodDefinition(
            interner.get_or_intern_static("b", utf16!("b")).into(),
            MethodDefinition::Get(Function::new(
                // Getters get the derived function name `get b`.
                Some(
                    interner
                        .get_or_intern_static("get b", utf16!("get b"))
                        .into(),
                ),
                FormalParameterList::default(),
                StatementList::default(),
            )),
        ),
    ];

    check_parser(
        "const x = {
            a: true,
            get b() {}
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that `set b(test) {}` parses as a setter method definition.
#[test]
fn check_object_setter() {
    let interner = &mut Interner::default();

    // The setter's single value parameter.
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(
            interner.get_or_intern_static("test", utf16!("test")).into(),
            None,
        ),
        false,
    ));

    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);

    let object_properties = vec![
        PropertyDefinition::Property(
            interner.get_or_intern_static("a", utf16!("a")).into(),
            Literal::from(true).into(),
        ),
        PropertyDefinition::MethodDefinition(
            interner.get_or_intern_static("b", utf16!("b")).into(),
            MethodDefinition::Set(Function::new(
                // Setters get the derived function name `set b`.
                Some(
                    interner
                        .get_or_intern_static("set b", utf16!("set b"))
                        .into(),
                ),
                params,
                StatementList::default(),
            )),
        ),
    ];

    check_parser(
        "const x = {
            a: true,
            set b(test) {}
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that a method literally named `get` is an ordinary method, not a getter.
#[test]
fn check_object_short_function_get() {
    let interner = &mut Interner::default();

    let object_properties = vec![PropertyDefinition::MethodDefinition(
        interner.get_or_intern_static("get", utf16!("get")).into(),
        MethodDefinition::Ordinary(Function::new(
            Some(interner.get_or_intern_static("get", utf16!("get")).into()),
            FormalParameterList::default(),
            StatementList::default(),
        )),
    )];

    check_parser(
        "const x = {
            get() {}
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that a method literally named `set` is an ordinary method, not a setter.
#[test]
fn check_object_short_function_set() {
    let interner = &mut Interner::default();

    let object_properties = vec![PropertyDefinition::MethodDefinition(
        interner.get_or_intern_static("set", utf16!("set")).into(),
        MethodDefinition::Ordinary(Function::new(
            Some(interner.get_or_intern_static("set", utf16!("set")).into()),
            FormalParameterList::default(),
            StatementList::default(),
        )),
    )];

    check_parser(
        "const x = {
            set() {}
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks shorthand property syntax: `{ a }` references the binding `a`.
#[test]
fn check_object_shorthand_property_names() {
    let interner = &mut Interner::default();

    let object_properties = vec![PropertyDefinition::IdentifierReference(
        interner.get_or_intern_static("a", utf16!("a")).into(),
    )];

    check_parser(
        "const a = true;
        const x = { a };
        ",
        vec![
            Declaration::Lexical(LexicalDeclaration::Const(
                vec![Variable::from_identifier(
                    interner.get_or_intern_static("a", utf16!("a")).into(),
                    Some(Literal::from(true).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            Declaration::Lexical(LexicalDeclaration::Const(
                vec![Variable::from_identifier(
                    interner.get_or_intern_static("x", utf16!("x")).into(),
                    Some(ObjectLiteral::from(object_properties).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
        ],
        interner,
    );
}
|
||||
|
||||
/// Checks multiple shorthand properties with a trailing comma: `{ a, b, }`.
#[test]
fn check_object_shorthand_multiple_properties() {
    let interner = &mut Interner::default();

    let object_properties = vec![
        PropertyDefinition::IdentifierReference(
            interner.get_or_intern_static("a", utf16!("a")).into(),
        ),
        PropertyDefinition::IdentifierReference(
            interner.get_or_intern_static("b", utf16!("b")).into(),
        ),
    ];

    check_parser(
        "const a = true;
        const b = false;
        const x = { a, b, };
        ",
        vec![
            Declaration::Lexical(LexicalDeclaration::Const(
                vec![Variable::from_identifier(
                    interner.get_or_intern_static("a", utf16!("a")).into(),
                    Some(Literal::from(true).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            Declaration::Lexical(LexicalDeclaration::Const(
                vec![Variable::from_identifier(
                    interner.get_or_intern_static("b", utf16!("b")).into(),
                    Some(Literal::from(false).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            Declaration::Lexical(LexicalDeclaration::Const(
                vec![Variable::from_identifier(
                    interner.get_or_intern_static("x", utf16!("x")).into(),
                    Some(ObjectLiteral::from(object_properties).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
        ],
        interner,
    );
}
|
||||
|
||||
/// Checks spread syntax in object literals: `{ a: 1, ...b }`.
#[test]
fn check_object_spread() {
    let interner = &mut Interner::default();

    let object_properties = vec![
        PropertyDefinition::Property(
            interner.get_or_intern_static("a", utf16!("a")).into(),
            Literal::from(1).into(),
        ),
        PropertyDefinition::SpreadObject(
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        ),
    ];

    check_parser(
        "const x = { a: 1, ...b };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that `async dive() {}` parses as an async method definition.
#[test]
fn check_async_method() {
    let interner = &mut Interner::default();

    let object_properties = vec![PropertyDefinition::MethodDefinition(
        interner.get_or_intern_static("dive", utf16!("dive")).into(),
        MethodDefinition::Async(AsyncFunction::new(
            Some(interner.get_or_intern_static("dive", utf16!("dive")).into()),
            FormalParameterList::default(),
            StatementList::default(),
            false,
        )),
    )];

    check_parser(
        "const x = {
            async dive() {}
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that the `async* name() {}` shorthand inside an object literal parses
/// into a `MethodDefinition::AsyncGenerator` property.
#[test]
fn check_async_generator_method() {
    let interner = &mut Interner::default();

    let object_properties = vec![PropertyDefinition::MethodDefinition(
        interner
            .get_or_intern_static("vroom", utf16!("vroom"))
            .into(),
        MethodDefinition::AsyncGenerator(AsyncGenerator::new(
            Some(
                interner
                    .get_or_intern_static("vroom", utf16!("vroom"))
                    .into(),
            ),
            FormalParameterList::default(),
            StatementList::default(),
            false,
        )),
    )];

    check_parser(
        "const x = {
            async* vroom() {}
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// A line terminator between `async` and the method name must be rejected:
/// the grammar forbids a line terminator after `async` in a method definition.
#[test]
fn check_async_method_lineterminator() {
    let source = "const x = {
            async
            dive(){}
        };
        ";
    check_invalid(source);
}
|
||||
|
||||
/// A line terminator between `async` and `*` must be rejected: the grammar
/// forbids a line terminator after `async` in an async generator method.
#[test]
fn check_async_gen_method_lineterminator() {
    let source = "const x = {
            async
            * vroom() {}
        };
        ";
    check_invalid(source);
}
|
||||
|
||||
/// Checks that `async` used as a plain method *name* (`async() {}`) parses as
/// an ordinary method, not as an async method.
#[test]
fn check_async_ordinary_method() {
    let interner = &mut Interner::default();

    let object_properties = vec![PropertyDefinition::MethodDefinition(
        PropertyName::Literal(interner.get_or_intern_static("async", utf16!("async"))),
        MethodDefinition::Ordinary(Function::new(
            Some(
                interner
                    .get_or_intern_static("async", utf16!("async"))
                    .into(),
            ),
            FormalParameterList::default(),
            StatementList::default(),
        )),
    )];

    check_parser(
        "const x = {
            async() {}
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that `async` used as a plain property *key* (`async: true`) parses as
/// an ordinary key/value property, not as the start of an async method.
#[test]
fn check_async_property() {
    let interner = &mut Interner::default();

    let object_properties = vec![PropertyDefinition::Property(
        PropertyName::Literal(interner.get_or_intern_static("async", utf16!("async"))),
        Literal::from(true).into(),
    )];

    check_parser(
        "const x = {
            async: true
        };
        ",
        vec![Declaration::Lexical(LexicalDeclaration::Const(
            vec![Variable::from_identifier(
                interner.get_or_intern_static("x", utf16!("x")).into(),
                Some(ObjectLiteral::from(object_properties).into()),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
107
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/template/mod.rs
vendored
Normal file
107
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/template/mod.rs
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
//! Template literal parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-template-literals
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{expression::Expression, AllowAwait, AllowYield, Cursor, ParseResult, TokenParser},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::literal::{self, TemplateElement},
|
||||
Position, Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses a template literal.
///
/// More information:
///  - [MDN documentation][mdn]
///  - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals
/// [spec]: https://tc39.es/ecma262/#prod-TemplateLiteral
#[derive(Debug, Clone)]
pub(super) struct TemplateLiteral {
    /// Whether `yield` expressions are allowed in substitution expressions.
    allow_yield: AllowYield,
    /// Whether `await` expressions are allowed in substitution expressions.
    allow_await: AllowAwait,
    /// Start position of the template literal; used when lexing subsequent
    /// template chunks and when reporting errors.
    start: Position,
    /// The first (already lexed) template string chunk.
    first: Sym,
}
|
||||
|
||||
impl TemplateLiteral {
|
||||
/// Creates a new `TemplateLiteral` parser.
|
||||
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A, start: Position, first: Sym) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
start,
|
||||
first,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for TemplateLiteral
where
    R: Read,
{
    type Output = literal::TemplateLiteral;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("TemplateLiteral", "Parsing");

        // The first chunk (`self.first`) was already lexed by the caller, so
        // start with it plus the first substitution expression, then require
        // the `}` that closes the `${ ... }` substitution.
        let mut elements = vec![
            TemplateElement::String(self.first),
            TemplateElement::Expr(
                Expression::new(None, true, self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?,
            ),
        ];
        cursor.expect(
            TokenKind::Punctuator(Punctuator::CloseBlock),
            "template literal",
            interner,
        )?;

        // Keep lexing template chunks until the terminating chunk is found.
        loop {
            match cursor.lex_template(self.start, interner)?.kind() {
                TokenKind::TemplateMiddle(template_string) => {
                    // Middle chunk: push its cooked string, then parse the
                    // next substitution expression and its closing `}`.
                    let cooked = template_string
                        .to_owned_cooked(interner)
                        .map_err(Error::lex)?;

                    elements.push(TemplateElement::String(cooked));
                    elements.push(TemplateElement::Expr(
                        Expression::new(None, true, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?,
                    ));
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::CloseBlock),
                        "template literal",
                        interner,
                    )?;
                }
                TokenKind::TemplateNoSubstitution(template_string) => {
                    // Final chunk with no further substitutions: push it and
                    // return the finished literal.
                    let cooked = template_string
                        .to_owned_cooked(interner)
                        .map_err(Error::lex)?;

                    elements.push(TemplateElement::String(cooked));
                    return Ok(literal::TemplateLiteral::new(elements.into()));
                }
                _ => return Err(Error::general("cannot parse template literal", self.start)),
            }
        }
    }
}
|
||||
25
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/tests.rs
vendored
Normal file
25
javascript-engine/external/boa/boa_parser/src/parser/expression/primary/tests.rs
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{expression::literal::Literal, Expression, Statement};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks that string literals (empty and non-empty) parse into `Literal`
/// expression statements.
#[test]
fn check_string() {
    // Check empty string
    check_parser(
        "\"\"",
        vec![Statement::Expression(Expression::from(Literal::from(Sym::EMPTY_STRING))).into()],
        &mut Interner::default(),
    );

    // Check non-empty string
    let interner = &mut Interner::default();
    check_parser(
        "\"hello\"",
        vec![Statement::Expression(Expression::from(Literal::from(
            interner.get_or_intern_static("hello", utf16!("hello")),
        )))
        .into()],
        interner,
    );
}
|
||||
723
javascript-engine/external/boa/boa_parser/src/parser/expression/tests.rs
vendored
Normal file
723
javascript-engine/external/boa/boa_parser/src/parser/expression/tests.rs
vendored
Normal file
@@ -0,0 +1,723 @@
|
||||
use crate::parser::tests::{check_invalid, check_parser};
|
||||
use boa_ast::{
|
||||
declaration::{LexicalDeclaration, Variable},
|
||||
expression::{
|
||||
literal::Literal,
|
||||
operator::{
|
||||
assign::AssignOp,
|
||||
binary::{ArithmeticOp, BitwiseOp, LogicalOp, RelationalOp},
|
||||
Assign, Binary,
|
||||
},
|
||||
Call, Identifier, New,
|
||||
},
|
||||
Declaration, Expression, Statement,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks numeric operations: each binary arithmetic operator is tested both
/// with and without surrounding whitespace, plus the `/`-vs-regex ambiguity.
#[test]
fn check_numeric_operations() {
    let interner = &mut Interner::default();
    check_parser(
        "a + b",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Add.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a+1",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Add.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Literal::from(1).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a - b",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Sub.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a-1",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Sub.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Literal::from(1).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a / b",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Div.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a/2",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Div.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Literal::from(2).into(),
        )))
        .into()],
        interner,
    );

    // `/=/` after `=` must lex as a regex literal (desugared to `new RegExp`),
    // not as the `/=` operator.
    let interner = &mut Interner::default();
    check_parser(
        "let myRegex = /=/;",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                interner
                    .get_or_intern_static("myRegex", utf16!("myRegex"))
                    .into(),
                Some(
                    New::from(Call::new(
                        Identifier::new(Sym::REGEXP).into(),
                        vec![
                            Literal::from(interner.get_or_intern_static("=", utf16!("="))).into(),
                            Literal::from(Sym::EMPTY_STRING).into(),
                        ]
                        .into(),
                    ))
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );

    // Regex literal in argument position.
    let interner = &mut Interner::default();
    check_parser(
        "fn(/=/);",
        vec![Statement::Expression(Expression::from(Call::new(
            Identifier::new(interner.get_or_intern_static("fn", utf16!("fn"))).into(),
            vec![New::from(Call::new(
                Identifier::new(Sym::REGEXP).into(),
                vec![
                    Literal::from(interner.get_or_intern_static("=", utf16!("="))).into(),
                    Literal::from(Sym::EMPTY_STRING).into(),
                ]
                .into(),
            ))
            .into()]
            .into(),
        )))
        .into()],
        interner,
    );

    // `/` inside an argument list is division, not a regex.
    let interner = &mut Interner::default();
    check_parser(
        "fn(a / b);",
        vec![Statement::Expression(Expression::from(Call::new(
            Identifier::new(interner.get_or_intern_static("fn", utf16!("fn"))).into(),
            vec![Expression::from(Binary::new(
                ArithmeticOp::Div.into(),
                Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
            ))]
            .into(),
        )))
        .into()],
        interner,
    );

    // `/` after a call expression is division.
    let interner = &mut Interner::default();
    check_parser(
        "fn(a) / b;",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Div.into(),
            Call::new(
                Identifier::new(interner.get_or_intern_static("fn", utf16!("fn"))).into(),
                vec![Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into()]
                    .into(),
            )
            .into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a * b",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Mul.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a*2",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Mul.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Literal::from(2).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a ** b",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Exp.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a**2",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Exp.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Literal::from(2).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a % b",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Mod.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a%2",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Mod.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Literal::from(2).into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
// Checks complex numeric operations: precedence and parenthesization of a
// mixed expression, `a + d*(b-3)+1`, which must parse left-associatively as
// `(a + (d * (b - 3))) + 1`.
#[test]
fn check_complex_numeric_operations() {
    let interner = &mut Interner::default();
    check_parser(
        "a + d*(b-3)+1",
        vec![Statement::Expression(Expression::from(Binary::new(
            ArithmeticOp::Add.into(),
            Binary::new(
                ArithmeticOp::Add.into(),
                Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                Binary::new(
                    ArithmeticOp::Mul.into(),
                    Identifier::new(interner.get_or_intern_static("d", utf16!("d"))).into(),
                    Binary::new(
                        ArithmeticOp::Sub.into(),
                        Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
                        Literal::from(3).into(),
                    )
                    .into(),
                )
                .into(),
            )
            .into(),
            Literal::from(1).into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks bitwise operations: each bitwise/shift operator is tested both with
/// and without surrounding whitespace.
#[test]
fn check_bitwise_operations() {
    let interner = &mut Interner::default();
    check_parser(
        "a & b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::And.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a&b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::And.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a | b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::Or.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a|b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::Or.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a ^ b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::Xor.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a^b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::Xor.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a << b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::Shl.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a<<b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::Shl.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a >> b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::Shr.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a>>b",
        vec![Statement::Expression(Expression::from(Binary::new(
            BitwiseOp::Shr.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks assignment operations: every compound assignment operator, plus a
/// compound assignment with a binary right-hand side and logical assignment.
#[test]
fn check_assign_operations() {
    let interner = &mut Interner::default();
    check_parser(
        "a += b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Add,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a -= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Sub,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a *= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Mul,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a **= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Exp,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a /= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Div,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a %= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Mod,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a &= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::And,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a |= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Or,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a ^= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Xor,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a <<= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Shl,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a >>= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Shr,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a >>>= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Ushr,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    // Compound assignment whose right-hand side is itself a binary expression.
    let interner = &mut Interner::default();
    check_parser(
        "a %= 10 / 2",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Mod,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Binary::new(
                ArithmeticOp::Div.into(),
                Literal::from(10).into(),
                Literal::from(2).into(),
            )
            .into(),
        )))
        .into()],
        interner,
    );

    // Logical nullish assignment.
    let interner = &mut Interner::default();
    check_parser(
        "a ??= b",
        vec![Statement::Expression(Expression::from(Assign::new(
            AssignOp::Coalesce,
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks relational operations: `<`, `>`, `<=`, `>=` and the `in` operator.
#[test]
fn check_relational_operations() {
    let interner = &mut Interner::default();
    check_parser(
        "a < b",
        vec![Statement::Expression(Expression::from(Binary::new(
            RelationalOp::LessThan.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a > b",
        vec![Statement::Expression(Expression::from(Binary::new(
            RelationalOp::GreaterThan.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a <= b",
        vec![Statement::Expression(Expression::from(Binary::new(
            RelationalOp::LessThanOrEqual.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a >= b",
        vec![Statement::Expression(Expression::from(Binary::new(
            RelationalOp::GreaterThanOrEqual.into(),
            Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
            Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "p in o",
        vec![Statement::Expression(Expression::from(Binary::new(
            RelationalOp::In.into(),
            Identifier::new(interner.get_or_intern_static("p", utf16!("p"))).into(),
            Identifier::new(interner.get_or_intern_static("o", utf16!("o"))).into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks logical expressions: `&&`/`||` precedence and associativity, `??`
/// chaining, and rejection of unparenthesized `??` mixed with `&&`/`||`.
#[test]
fn check_logical_expressions() {
    let interner = &mut Interner::default();
    check_parser(
        "a && b || c && d || e",
        vec![Statement::Expression(Expression::from(Binary::new(
            LogicalOp::Or.into(),
            Binary::new(
                LogicalOp::And.into(),
                Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
            )
            .into(),
            Binary::new(
                LogicalOp::Or.into(),
                Binary::new(
                    LogicalOp::And.into(),
                    Identifier::new(interner.get_or_intern_static("c", utf16!("c"))).into(),
                    Identifier::new(interner.get_or_intern_static("d", utf16!("d"))).into(),
                )
                .into(),
                Identifier::new(interner.get_or_intern_static("e", utf16!("e"))).into(),
            )
            .into(),
        )))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "a ?? b ?? c",
        vec![Statement::Expression(Expression::from(Binary::new(
            LogicalOp::Coalesce.into(),
            Binary::new(
                LogicalOp::Coalesce.into(),
                Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
            )
            .into(),
            Identifier::new(interner.get_or_intern_static("c", utf16!("c"))).into(),
        )))
        .into()],
        interner,
    );

    // `??` may not be mixed with `&&`/`||` without explicit parentheses.
    check_invalid("a ?? b && c");
    check_invalid("a && b ?? c");
    check_invalid("a ?? b || c");
    check_invalid("a || b ?? c");
}
|
||||
|
||||
// Asserts that the given keyword-like word parses as a plain identifier when it
// appears in expression position (wrapped in parentheses to force expression
// context).
macro_rules! check_non_reserved_identifier {
    ($keyword:literal) => {{
        let interner = &mut Interner::default();
        check_parser(
            format!("({})", $keyword).as_str(),
            vec![Statement::Expression(Expression::from(Identifier::new(
                interner.get_or_intern_static($keyword, utf16!($keyword)),
            )))
            .into()],
            interner,
        );
    }};
}
|
||||
|
||||
#[test]
fn check_non_reserved_identifiers() {
    // https://tc39.es/ecma262/#sec-keywords-and-reserved-words
    // Those that are always allowed as identifiers, but also appear as
    // keywords within certain syntactic productions, at places where
    // Identifier is not allowed: as, async, from, get, meta, of, set,
    // and target.

    check_non_reserved_identifier!("as");
    check_non_reserved_identifier!("async");
    check_non_reserved_identifier!("from");
    check_non_reserved_identifier!("get");
    check_non_reserved_identifier!("meta");
    check_non_reserved_identifier!("of");
    check_non_reserved_identifier!("set");
    check_non_reserved_identifier!("target");
}
|
||||
136
javascript-engine/external/boa/boa_parser/src/parser/expression/unary.rs
vendored
Normal file
136
javascript-engine/external/boa/boa_parser/src/parser/expression/unary.rs
vendored
Normal file
@@ -0,0 +1,136 @@
|
||||
//! Unary operator parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Expressions_and_Operators#Unary
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-unary-operators
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::{await_expr::AwaitExpression, update::UpdateExpression},
|
||||
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::{
|
||||
access::PropertyAccess,
|
||||
operator::{unary::UnaryOp, Unary},
|
||||
Identifier,
|
||||
},
|
||||
Expression, Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses a unary expression.
///
/// More information:
///  - [MDN documentation][mdn]
///  - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Expressions_and_Operators#Unary
/// [spec]: https://tc39.es/ecma262/#prod-UnaryExpression
#[derive(Debug, Clone, Copy)]
pub(in crate::parser) struct UnaryExpression {
    // Optional binding name, forwarded unchanged to the `UpdateExpression`
    // fallback parser in `parse`.
    name: Option<Identifier>,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl UnaryExpression {
|
||||
/// Creates a new `UnaryExpression` parser.
|
||||
pub(in crate::parser) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for UnaryExpression
where
    R: Read,
{
    type Output = Expression;

    /// Dispatches on the next token: unary operator keywords/punctuators
    /// recurse into `UnaryExpression`; anything else falls through to
    /// `UpdateExpression`.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("UnaryExpression", "Parsing");

        let tok = cursor.peek(0, interner).or_abrupt()?;
        let token_start = tok.span().start();
        match tok.kind() {
            // The `true` in the keyword tuple flags an escaped keyword
            // (e.g. `\u0064elete`), which is a syntax error here.
            TokenKind::Keyword((Keyword::Delete | Keyword::Void | Keyword::TypeOf, true)) => Err(
                Error::general("Keyword must not contain escaped characters", token_start),
            ),
            TokenKind::Keyword((Keyword::Delete, false)) => {
                cursor.advance(interner);
                // Position of the operand, used for the private-field error below.
                let position = cursor.peek(0, interner).or_abrupt()?.span().start();
                let val = self.parse(cursor, interner)?;

                // Early errors for `delete`:
                // https://tc39.es/ecma262/#sec-delete-operator-static-semantics-early-errors
                match val {
                    Expression::Identifier(_) if cursor.strict_mode() => {
                        return Err(Error::lex(LexError::Syntax(
                            "cannot delete variables in strict mode".into(),
                            token_start,
                        )));
                    }
                    Expression::PropertyAccess(PropertyAccess::Private(_)) => {
                        return Err(Error::lex(LexError::Syntax(
                            "cannot delete private fields".into(),
                            position,
                        )));
                    }
                    _ => {}
                }

                Ok(Unary::new(UnaryOp::Delete, val).into())
            }
            TokenKind::Keyword((Keyword::Void, false)) => {
                cursor.advance(interner);
                Ok(Unary::new(UnaryOp::Void, self.parse(cursor, interner)?).into())
            }
            TokenKind::Keyword((Keyword::TypeOf, false)) => {
                cursor.advance(interner);
                Ok(Unary::new(UnaryOp::TypeOf, self.parse(cursor, interner)?).into())
            }
            TokenKind::Punctuator(Punctuator::Add) => {
                cursor.advance(interner);
                Ok(Unary::new(UnaryOp::Plus, self.parse(cursor, interner)?).into())
            }
            TokenKind::Punctuator(Punctuator::Sub) => {
                cursor.advance(interner);
                Ok(Unary::new(UnaryOp::Minus, self.parse(cursor, interner)?).into())
            }
            TokenKind::Punctuator(Punctuator::Neg) => {
                cursor.advance(interner);
                Ok(Unary::new(UnaryOp::Tilde, self.parse(cursor, interner)?).into())
            }
            TokenKind::Punctuator(Punctuator::Not) => {
                cursor.advance(interner);
                Ok(Unary::new(UnaryOp::Not, self.parse(cursor, interner)?).into())
            }
            // `await` is only a unary operator inside async contexts
            // (`allow_await` set); an escaped `await` is rejected first.
            TokenKind::Keyword((Keyword::Await, true)) if self.allow_await.0 => {
                Err(Error::general(
                    "Keyword 'await' must not contain escaped characters",
                    token_start,
                ))
            }
            TokenKind::Keyword((Keyword::Await, false)) if self.allow_await.0 => {
                Ok((AwaitExpression::new(self.allow_yield).parse(cursor, interner)?).into())
            }
            // Not a unary operator: delegate to the next-lower precedence level.
            _ => UpdateExpression::new(self.name, self.allow_yield, self.allow_await)
                .parse(cursor, interner),
        }
    }
}
|
||||
167
javascript-engine/external/boa/boa_parser/src/parser/expression/update.rs
vendored
Normal file
167
javascript-engine/external/boa/boa_parser/src/parser/expression/update.rs
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
//! Update expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-update-expressions
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::{
|
||||
check_strict_arguments_or_eval, left_hand_side::LeftHandSideExpression,
|
||||
unary::UnaryExpression,
|
||||
},
|
||||
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::{
|
||||
operator::{unary::UnaryOp, Unary},
|
||||
Identifier,
|
||||
},
|
||||
Expression, Position, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses an update expression.
///
/// More information:
///  - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-UpdateExpression
#[derive(Debug, Clone, Copy)]
pub(super) struct UpdateExpression {
    // Optional binding name, forwarded to the nested `UnaryExpression` /
    // `LeftHandSideExpression` parsers.
    name: Option<Identifier>,
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl UpdateExpression {
|
||||
/// Creates a new `UpdateExpression` parser.
|
||||
pub(super) fn new<N, Y, A>(name: N, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
N: Into<Option<Identifier>>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
name: name.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <https://tc39.es/ecma262/multipage/syntax-directed-operations.html#sec-static-semantics-assignmenttargettype>
|
||||
/// This function checks if the target type is simple
|
||||
fn is_simple(expr: &Expression, position: Position, strict: bool) -> ParseResult<bool> {
|
||||
match expr {
|
||||
Expression::Identifier(ident) => {
|
||||
if strict {
|
||||
check_strict_arguments_or_eval(*ident, position)?;
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
Expression::PropertyAccess(_) => Ok(true),
|
||||
_ => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for UpdateExpression
where
    R: Read,
{
    type Output = Expression;

    /// Parses prefix `++`/`--` (recursing into `UnaryExpression`) or a
    /// left-hand-side expression optionally followed by postfix `++`/`--`.
    /// A line terminator before the postfix operator suppresses it (ASI).
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("UpdateExpression", "Parsing");

        let tok = cursor.peek(0, interner).or_abrupt()?;
        let position = tok.span().start();
        match tok.kind() {
            TokenKind::Punctuator(Punctuator::Inc) => {
                cursor
                    .next(interner)?
                    .expect("Punctuator::Inc token disappeared");

                let target = UnaryExpression::new(self.name, self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?;
                // https://tc39.es/ecma262/#sec-update-expressions-static-semantics-early-errors
                // The operand must be a simple assignment target.
                if !is_simple(&target, position, cursor.strict_mode())? {
                    return Err(Error::lex(LexError::Syntax(
                        "Invalid left-hand side in assignment".into(),
                        position,
                    )));
                }

                return Ok(Unary::new(UnaryOp::IncrementPre, target).into());
            }
            TokenKind::Punctuator(Punctuator::Dec) => {
                cursor
                    .next(interner)?
                    .expect("Punctuator::Dec token disappeared");

                let target = UnaryExpression::new(self.name, self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?;
                // https://tc39.es/ecma262/#sec-update-expressions-static-semantics-early-errors
                if !is_simple(&target, position, cursor.strict_mode())? {
                    return Err(Error::lex(LexError::Syntax(
                        "Invalid left-hand side in assignment".into(),
                        position,
                    )));
                }

                return Ok(Unary::new(UnaryOp::DecrementPre, target).into());
            }
            _ => {}
        }

        // Not a prefix form: parse the operand first.
        let lhs = LeftHandSideExpression::new(self.name, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;

        // A line terminator between the operand and `++`/`--` means the
        // operator belongs to the next statement (no-LineTerminator rule);
        // `None` (end of input) is treated the same way.
        if cursor.peek_is_line_terminator(0, interner)?.unwrap_or(true) {
            return Ok(lhs);
        }

        if let Some(tok) = cursor.peek(0, interner)? {
            let token_start = tok.span().start();
            match tok.kind() {
                TokenKind::Punctuator(Punctuator::Inc) => {
                    cursor
                        .next(interner)?
                        .expect("Punctuator::Inc token disappeared");
                    // https://tc39.es/ecma262/#sec-update-expressions-static-semantics-early-errors
                    if !is_simple(&lhs, token_start, cursor.strict_mode())? {
                        return Err(Error::lex(LexError::Syntax(
                            "Invalid left-hand side in assignment".into(),
                            token_start,
                        )));
                    }

                    return Ok(Unary::new(UnaryOp::IncrementPost, lhs).into());
                }
                TokenKind::Punctuator(Punctuator::Dec) => {
                    cursor
                        .next(interner)?
                        .expect("Punctuator::Dec token disappeared");
                    // https://tc39.es/ecma262/#sec-update-expressions-static-semantics-early-errors
                    if !is_simple(&lhs, token_start, cursor.strict_mode())? {
                        return Err(Error::lex(LexError::Syntax(
                            "Invalid left-hand side in assignment".into(),
                            token_start,
                        )));
                    }

                    return Ok(Unary::new(UnaryOp::DecrementPost, lhs).into());
                }
                _ => {}
            }
        }

        Ok(lhs)
    }
}
|
||||
461
javascript-engine/external/boa/boa_parser/src/parser/function/mod.rs
vendored
Normal file
461
javascript-engine/external/boa/boa_parser/src/parser/function/mod.rs
vendored
Normal file
@@ -0,0 +1,461 @@
|
||||
//! Function definition parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/function
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-function-definitions
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, InputElement, TokenKind},
|
||||
parser::{
|
||||
expression::{BindingIdentifier, Initializer},
|
||||
statement::{ArrayBindingPattern, ObjectBindingPattern, StatementList},
|
||||
AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
declaration::Variable,
|
||||
function::{FormalParameterList, FormalParameterListFlags},
|
||||
Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Formal parameters parsing.
///
/// More information:
///  - [MDN documentation][mdn]
///  - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Glossary/Parameter
/// [spec]: https://tc39.es/ecma262/#prod-FormalParameters
#[derive(Debug, Clone, Copy)]
pub(in crate::parser) struct FormalParameters {
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl FormalParameters {
|
||||
/// Creates a new `FormalParameters` parser.
|
||||
pub(in crate::parser) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for FormalParameters
where
    R: Read,
{
    type Output = FormalParameterList;

    /// Parses a comma-separated parameter list, stopping (without consuming)
    /// at the closing `)`. Assumes the opening `(` was already consumed by
    /// the caller — TODO confirm against call sites.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("FormalParameters", "Parsing");
        cursor.set_goal(InputElement::RegExp);

        let mut params = Vec::new();

        // Empty parameter list: `()`.
        let next_token = cursor.peek(0, interner).or_abrupt()?;
        if next_token.kind() == &TokenKind::Punctuator(Punctuator::CloseParen) {
            return Ok(FormalParameterList::default());
        }
        let start_position = next_token.span().start();

        loop {
            let mut rest_param = false;

            // `...` introduces a rest parameter; otherwise parse a normal one.
            let next_param = match cursor.peek(0, interner)? {
                Some(tok) if tok.kind() == &TokenKind::Punctuator(Punctuator::Spread) => {
                    rest_param = true;
                    FunctionRestParameter::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?
                }
                _ => FormalParameter::new(self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?,
            };

            // `function f(...a = 1)` is invalid.
            if next_param.is_rest_param() && next_param.init().is_some() {
                return Err(Error::lex(LexError::Syntax(
                    "Rest parameter may not have a default initializer".into(),
                    start_position,
                )));
            }

            params.push(next_param);

            if cursor.peek(0, interner).or_abrupt()?.kind()
                == &TokenKind::Punctuator(Punctuator::CloseParen)
            {
                break;
            }

            // Anything after a rest parameter other than `)` is an error.
            if rest_param {
                let next = cursor.next(interner)?.expect("peeked token disappeared");
                return Err(Error::unexpected(
                    next.to_string(interner),
                    next.span(),
                    "rest parameter must be the last formal parameter",
                ));
            }

            cursor.expect(Punctuator::Comma, "parameter list", interner)?;
            // Allow a trailing comma: `(a, b,)`.
            if cursor.peek(0, interner).or_abrupt()?.kind()
                == &TokenKind::Punctuator(Punctuator::CloseParen)
            {
                break;
            }
        }

        let params = FormalParameterList::from_parameters(params);

        // Early Error: It is a Syntax Error if IsSimpleParameterList of FormalParameterList is false
        // and BoundNames of FormalParameterList contains any duplicate elements.
        if !params.flags().contains(FormalParameterListFlags::IS_SIMPLE)
            && params
                .flags()
                .contains(FormalParameterListFlags::HAS_DUPLICATES)
        {
            return Err(Error::lex(LexError::Syntax(
                "Duplicate parameter name not allowed in this context".into(),
                start_position,
            )));
        }
        Ok(params)
    }
}
|
||||
|
||||
/// `UniqueFormalParameters` parsing.
///
/// More information:
///  - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-UniqueFormalParameters
#[derive(Debug, Clone, Copy)]
pub(in crate::parser) struct UniqueFormalParameters {
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl UniqueFormalParameters {
|
||||
/// Creates a new `UniqueFormalParameters` parser.
|
||||
pub(in crate::parser) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for UniqueFormalParameters
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = FormalParameterList;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let params_start_position = cursor
|
||||
.expect(
|
||||
TokenKind::Punctuator(Punctuator::OpenParen),
|
||||
"unique formal parameters",
|
||||
interner,
|
||||
)?
|
||||
.span()
|
||||
.end();
|
||||
let params =
|
||||
FormalParameters::new(self.allow_yield, self.allow_await).parse(cursor, interner)?;
|
||||
cursor.expect(
|
||||
TokenKind::Punctuator(Punctuator::CloseParen),
|
||||
"unique formal parameters",
|
||||
interner,
|
||||
)?;
|
||||
|
||||
// Early Error: UniqueFormalParameters : FormalParameters
|
||||
if params.has_duplicates() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"duplicate parameter name not allowed in unique formal parameters".into(),
|
||||
params_start_position,
|
||||
)));
|
||||
}
|
||||
Ok(params)
|
||||
}
|
||||
}
|
||||
|
||||
/// Rest parameter parsing.
///
/// `FunctionRestParameter` is grammatically identical to
/// `BindingRestElement`, so it is just an alias for that parser.
///
/// More information:
///  - [MDN documentation][mdn]
///  - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/rest_parameters
/// [spec]: https://tc39.es/ecma262/#prod-FunctionRestParameter
type FunctionRestParameter = BindingRestElement;
|
||||
|
||||
/// Rest parameter parsing.
///
/// More information:
///  - [MDN documentation][mdn]
///  - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/rest_parameters
/// [spec]: https://tc39.es/ecma262/#prod-BindingRestElement
#[derive(Debug, Clone, Copy)]
struct BindingRestElement {
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl BindingRestElement {
|
||||
/// Creates a new `BindingRestElement` parser.
|
||||
fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for BindingRestElement
where
    R: Read,
{
    type Output = ast::function::FormalParameter;

    /// Parses `... BindingIdentifier` / `... BindingPattern` and wraps the
    /// result in a rest `FormalParameter` (second argument `true`).
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("BindingRestElement", "Parsing");
        cursor.expect(Punctuator::Spread, "rest parameter", interner)?;

        if let Some(t) = cursor.peek(0, interner)? {
            let declaration = match *t.kind() {
                // `...{ ... }` — object destructuring rest target.
                TokenKind::Punctuator(Punctuator::OpenBlock) => {
                    let param = ObjectBindingPattern::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;

                    // Only parse an initializer if `=` follows; `transpose`
                    // turns Option<Result<_>> into Result<Option<_>>.
                    // NOTE(review): a rest initializer is rejected later by
                    // `FormalParameters::parse` — confirm.
                    let init = cursor
                        .peek(0, interner)?
                        .cloned()
                        .filter(|t| {
                            // Check that this is an initializer before attempting parse.
                            *t.kind() == TokenKind::Punctuator(Punctuator::Assign)
                        })
                        .map(|_| {
                            Initializer::new(None, true, self.allow_yield, self.allow_await)
                                .parse(cursor, interner)
                        })
                        .transpose()?;
                    Variable::from_pattern(param.into(), init)
                }

                // `...[ ... ]` — array destructuring rest target (no initializer).
                TokenKind::Punctuator(Punctuator::OpenBracket) => Variable::from_pattern(
                    ArrayBindingPattern::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?
                        .into(),
                    None,
                ),

                // `...name` — plain identifier rest target.
                _ => {
                    let params = BindingIdentifier::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    let init = cursor
                        .peek(0, interner)?
                        .cloned()
                        .filter(|t| {
                            // Check that this is an initializer before attempting parse.
                            *t.kind() == TokenKind::Punctuator(Punctuator::Assign)
                        })
                        .map(|_| {
                            Initializer::new(None, true, self.allow_yield, self.allow_await)
                                .parse(cursor, interner)
                        })
                        .transpose()?;

                    Variable::from_identifier(params, init)
                }
            };
            Ok(Self::Output::new(declaration, true))
        } else {
            // End of input after `...`: produce a placeholder parameter with
            // an empty name rather than failing here.
            Ok(Self::Output::new(
                Variable::from_identifier(Sym::EMPTY_STRING.into(), None),
                true,
            ))
        }
    }
}
|
||||
|
||||
/// Formal parameter parsing.
///
/// More information:
///  - [MDN documentation][mdn]
///  - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Glossary/Parameter
/// [spec]: https://tc39.es/ecma262/#prod-FormalParameter
#[derive(Debug, Clone, Copy)]
pub(in crate::parser) struct FormalParameter {
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl FormalParameter {
|
||||
/// Creates a new `FormalParameter` parser.
|
||||
pub(in crate::parser) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for FormalParameter
where
    R: Read,
{
    type Output = ast::function::FormalParameter;

    /// Parses a single (non-rest) formal parameter: an object pattern, an
    /// array pattern, or a binding identifier, each with an optional `=`
    /// initializer.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("FormalParameter", "Parsing");

        if let Some(t) = cursor.peek(0, interner)? {
            let declaration = match *t.kind() {
                // `{ ... }` — object destructuring parameter.
                TokenKind::Punctuator(Punctuator::OpenBlock) => {
                    let bindings = ObjectBindingPattern::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    // Parse a default initializer only when `=` follows.
                    let init = if *cursor.peek(0, interner).or_abrupt()?.kind()
                        == TokenKind::Punctuator(Punctuator::Assign)
                    {
                        Some(
                            Initializer::new(None, true, self.allow_yield, self.allow_await)
                                .parse(cursor, interner)?,
                        )
                    } else {
                        None
                    };

                    Variable::from_pattern(bindings.into(), init)
                }
                // `[ ... ]` — array destructuring parameter.
                TokenKind::Punctuator(Punctuator::OpenBracket) => {
                    let bindings = ArrayBindingPattern::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    let init = if *cursor.peek(0, interner).or_abrupt()?.kind()
                        == TokenKind::Punctuator(Punctuator::Assign)
                    {
                        Some(
                            Initializer::new(None, true, self.allow_yield, self.allow_await)
                                .parse(cursor, interner)?,
                        )
                    } else {
                        None
                    };

                    Variable::from_pattern(bindings.into(), init)
                }
                // Plain identifier parameter.
                _ => {
                    let ident = BindingIdentifier::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    let init = if *cursor.peek(0, interner).or_abrupt()?.kind()
                        == TokenKind::Punctuator(Punctuator::Assign)
                    {
                        Some(
                            Initializer::new(None, true, self.allow_yield, self.allow_await)
                                .parse(cursor, interner)?,
                        )
                    } else {
                        None
                    };

                    Variable::from_identifier(ident, init)
                }
            };
            // Second argument `false`: this is not a rest parameter.
            Ok(Self::Output::new(declaration, false))
        } else {
            // End of input: placeholder parameter with an empty name.
            Ok(Self::Output::new(
                Variable::from_identifier(Sym::EMPTY_STRING.into(), None),
                false,
            ))
        }
    }
}
|
||||
|
||||
/// A `FunctionBody` is equivalent to a `FunctionStatementList`.
///
/// More information:
///  - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-FunctionBody
pub(in crate::parser) type FunctionBody = FunctionStatementList;

/// The possible `TokenKind` which indicate the end of a function statement.
// A function body ends only at the closing `}`.
pub(in crate::parser) const FUNCTION_BREAK_TOKENS: [TokenKind; 1] =
    [TokenKind::Punctuator(Punctuator::CloseBlock)];
|
||||
|
||||
/// A function statement list
///
/// More information:
///  - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-FunctionStatementList
#[derive(Debug, Clone, Copy)]
pub(in crate::parser) struct FunctionStatementList {
    // `[Yield]` grammar parameter.
    allow_yield: AllowYield,
    // `[Await]` grammar parameter.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl FunctionStatementList {
|
||||
/// Creates a new `FunctionStatementList` parser.
|
||||
pub(in crate::parser) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for FunctionStatementList
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = ast::StatementList;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("FunctionStatementList", "Parsing");
|
||||
|
||||
StatementList::new(
|
||||
self.allow_yield,
|
||||
self.allow_await,
|
||||
true,
|
||||
&FUNCTION_BREAK_TOKENS,
|
||||
true,
|
||||
false,
|
||||
)
|
||||
.parse(cursor, interner)
|
||||
}
|
||||
}
|
||||
714
javascript-engine/external/boa/boa_parser/src/parser/function/tests.rs
vendored
Normal file
714
javascript-engine/external/boa/boa_parser/src/parser/function/tests.rs
vendored
Normal file
@@ -0,0 +1,714 @@
|
||||
use crate::parser::tests::{check_invalid, check_parser};
|
||||
use boa_ast::{
|
||||
declaration::{LexicalDeclaration, Variable},
|
||||
expression::{
|
||||
operator::{binary::ArithmeticOp, Binary},
|
||||
Identifier,
|
||||
},
|
||||
function::{
|
||||
ArrowFunction, FormalParameter, FormalParameterList, FormalParameterListFlags, Function,
|
||||
},
|
||||
statement::Return,
|
||||
Declaration, Expression, Statement, StatementList, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks basic function declaration parsing.
#[test]
fn check_basic() {
    let interner = &mut Interner::default();
    // Single simple parameter `a`.
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        false,
    ));
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);

    check_parser(
        "function foo(a) { return a; }",
        vec![Declaration::Function(Function::new(
            Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(Some(
                    Identifier::from(interner.get_or_intern_static("a", utf16!("a"))).into(),
                )),
            ))]
            .into(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks if duplicate parameter names are allowed with strict mode off.
#[test]
fn check_duplicates_strict_off() {
    let interner = &mut Interner::default();
    // Two parameters with the same name `a`.
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
    ]);
    // The list must be flagged as containing duplicates.
    assert_eq!(
        params.flags(),
        FormalParameterListFlags::default().union(FormalParameterListFlags::HAS_DUPLICATES)
    );
    assert_eq!(params.length(), 2);
    check_parser(
        "function foo(a, a) { return a; }",
        vec![Declaration::Function(Function::new(
            Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(Some(
                    Identifier::from(interner.get_or_intern_static("a", utf16!("a"))).into(),
                )),
            ))]
            .into(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks if duplicate parameter names are an error with strict mode on.
#[test]
fn check_duplicates_strict_on() {
    // Duplicate `a` must be rejected under `'use strict'`.
    check_invalid("'use strict'; function foo(a, a) {}");
}
|
||||
|
||||
/// Checks basic function declaration parsing with automatic semicolon insertion.
#[test]
fn check_basic_semicolon_insertion() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        false,
    ));
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);

    // Note: no semicolon after `return a` — ASI must supply it.
    check_parser(
        "function foo(a) { return a }",
        vec![Declaration::Function(Function::new(
            Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(Some(
                    Identifier::from(interner.get_or_intern_static("a", utf16!("a"))).into(),
                )),
            ))]
            .into(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks functions with empty returns.
#[test]
fn check_empty_return() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        false,
    ));
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);
    // `return;` with no expression maps to `Return::new(None)`.
    check_parser(
        "function foo(a) { return; }",
        vec![Declaration::Function(Function::new(
            Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(None),
            ))]
            .into(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks functions with empty returns without semicolon
#[test]
fn check_empty_return_semicolon_insertion() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        false,
    ));
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);
    // `return` with neither expression nor semicolon — ASI applies.
    check_parser(
        "function foo(a) { return }",
        vec![Declaration::Function(Function::new(
            Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(None),
            ))]
            .into(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks rest operator parsing.
#[test]
fn check_rest_operator() {
    let interner = &mut Interner::default();
    // `a` is a normal parameter, `b` is a rest parameter (second arg `true`).
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            true,
        ),
    ]);
    assert_eq!(
        params.flags(),
        FormalParameterListFlags::empty().union(FormalParameterListFlags::HAS_REST_PARAMETER)
    );
    // The rest parameter does not count towards `length`.
    assert_eq!(params.length(), 1);
    check_parser(
        "function foo(a, ...b) {}",
        vec![Declaration::Function(Function::new(
            Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
            params,
            StatementList::default(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks an arrow function with only a rest parameter.
///
/// A lone rest parameter sets `HAS_REST_PARAMETER` and yields `length() == 0`,
/// since rest parameters never count toward `length`.
#[test]
fn check_arrow_only_rest() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        true, // rest parameter
    ));
    assert_eq!(
        params.flags(),
        FormalParameterListFlags::empty().union(FormalParameterListFlags::HAS_REST_PARAMETER)
    );
    assert_eq!(params.length(), 0);
    check_parser(
        "(...a) => {}",
        // A bare arrow expression has no binding, so its name is `None`.
        vec![Statement::Expression(Expression::from(ArrowFunction::new(
            None,
            params,
            StatementList::default(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks an arrow function with a rest parameter.
///
/// `(a, b, ...c) => {}`: two plain parameters plus a trailing rest parameter,
/// so `length() == 2` and `HAS_REST_PARAMETER` is set.
#[test]
fn check_arrow_rest() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
        // `true` marks `c` as the rest parameter.
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("c", utf16!("c")).into(), None),
            true,
        ),
    ]);
    assert_eq!(
        params.flags(),
        FormalParameterListFlags::empty().union(FormalParameterListFlags::HAS_REST_PARAMETER)
    );
    assert_eq!(params.length(), 2);
    check_parser(
        "(a, b, ...c) => {}",
        vec![Statement::Expression(Expression::from(ArrowFunction::new(
            None,
            params,
            StatementList::default(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks an arrow function with expression return.
///
/// `(a, b) => { return a + b; }` must parse the body into a single `return`
/// statement whose expression is the binary addition `a + b`.
#[test]
fn check_arrow() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
    ]);
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 2);
    check_parser(
        "(a, b) => { return a + b; }",
        vec![Statement::Expression(Expression::from(ArrowFunction::new(
            None,
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(Some(
                    Binary::new(
                        ArithmeticOp::Add.into(),
                        Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                        Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
                    )
                    .into(),
                )),
            ))]
            .into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks an arrow function with expression return and automatic semicolon insertion.
///
/// Identical to `check_arrow`, except the `return` statement has no trailing
/// `;`, so automatic semicolon insertion must apply and yield the same AST.
#[test]
fn check_arrow_semicolon_insertion() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
    ]);
    check_parser(
        "(a, b) => { return a + b }",
        vec![Statement::Expression(Expression::from(ArrowFunction::new(
            None,
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(Some(
                    Binary::new(
                        ArithmeticOp::Add.into(),
                        Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                        Identifier::new(interner.get_or_intern_static("b", utf16!("b"))).into(),
                    )
                    .into(),
                )),
            ))]
            .into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks an arrow function with an empty return.
///
/// `(a, b) => { return; }` must parse the body into a single `return`
/// statement carrying no expression (`Return::new(None)`).
#[test]
fn check_arrow_empty_return() {
    // NOTE: renamed from `check_arrow_epty_return` (typo). `#[test]` functions
    // are discovered by the test harness, so nothing references the old name.
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
    ]);
    check_parser(
        "(a, b) => { return; }",
        vec![Statement::Expression(Expression::from(ArrowFunction::new(
            None,
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(None),
            ))]
            .into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks an arrow function with empty return, with automatic semicolon insertion.
///
/// `(a, b) => { return }`: the missing `;` after `return` must be recovered
/// by automatic semicolon insertion, producing `Return::new(None)`.
#[test]
fn check_arrow_empty_return_semicolon_insertion() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
    ]);
    check_parser(
        "(a, b) => { return }",
        vec![Statement::Expression(Expression::from(ArrowFunction::new(
            None,
            params,
            vec![StatementListItem::Statement(Statement::Return(
                Return::new(None),
            ))]
            .into(),
        )))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that an arrow function assigned to a `let` binding parses and is
/// named after the binding (`foo`), per named-evaluation semantics.
#[test]
fn check_arrow_assignment() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        false,
    ));
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);
    check_parser(
        "let foo = (a) => { return a };",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                Identifier::new(interner.get_or_intern_static("foo", utf16!("foo"))),
                Some(
                    // The arrow function takes its name from the binding.
                    ArrowFunction::new(
                        Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
                        params,
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(
                                Identifier::new(interner.get_or_intern_static("a", utf16!("a")))
                                    .into(),
                            )),
                        ))]
                        .into(),
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that an arrow function with a concise (bracket-less) body assigned
/// to a `let` binding parses; the concise body desugars to a single `return`.
#[test]
fn check_arrow_assignment_nobrackets() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        false,
    ));
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);
    check_parser(
        "let foo = (a) => a;",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                // Use `Identifier::new` like the sibling tests, instead of the
                // bare `Sym::into` conversion, for consistency.
                Identifier::new(interner.get_or_intern_static("foo", utf16!("foo"))),
                Some(
                    ArrowFunction::new(
                        Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
                        params,
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(
                                Identifier::new(interner.get_or_intern_static("a", utf16!("a")))
                                    .into(),
                            )),
                        ))]
                        .into(),
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that an arrow function with an unparenthesized single parameter
/// (`a => { ... }`) assigned to a `let` binding parses correctly.
#[test]
fn check_arrow_assignment_noparenthesis() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        false,
    ));
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);
    check_parser(
        "let foo = a => { return a };",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                // Use `Identifier::new` like the sibling tests, instead of the
                // bare `Sym::into` conversion, for consistency.
                Identifier::new(interner.get_or_intern_static("foo", utf16!("foo"))),
                Some(
                    ArrowFunction::new(
                        Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
                        params,
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(
                                Identifier::new(interner.get_or_intern_static("a", utf16!("a")))
                                    .into(),
                            )),
                        ))]
                        .into(),
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks that an arrow function with an unparenthesized single parameter and
/// a concise body (`a => a`) assigned to a `let` binding parses correctly.
#[test]
fn check_arrow_assignment_noparenthesis_nobrackets() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(FormalParameter::new(
        Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
        false,
    ));
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 1);
    check_parser(
        "let foo = a => a;",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                Identifier::new(interner.get_or_intern_static("foo", utf16!("foo"))),
                Some(
                    // The arrow function takes its name from the binding; the
                    // concise body desugars to a single `return a`.
                    ArrowFunction::new(
                        Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
                        params,
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(
                                Identifier::new(interner.get_or_intern_static("a", utf16!("a")))
                                    .into(),
                            )),
                        ))]
                        .into(),
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks a two-parameter arrow function assigned to a `let` binding.
#[test]
fn check_arrow_assignment_2arg() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
    ]);
    // Two plain parameters: default flags, length 2.
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 2);
    check_parser(
        "let foo = (a, b) => { return a };",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                Identifier::new(interner.get_or_intern_static("foo", utf16!("foo"))),
                Some(
                    ArrowFunction::new(
                        Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
                        params,
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(
                                Identifier::new(interner.get_or_intern_static("a", utf16!("a")))
                                    .into(),
                            )),
                        ))]
                        .into(),
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks a two-parameter arrow function with a concise body assigned to a
/// `let` binding.
#[test]
fn check_arrow_assignment_2arg_nobrackets() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
    ]);
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 2);
    check_parser(
        "let foo = (a, b) => a;",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                Identifier::new(interner.get_or_intern_static("foo", utf16!("foo"))),
                Some(
                    // Concise body `a` desugars to a single `return a`.
                    ArrowFunction::new(
                        Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
                        params,
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(
                                Identifier::new(interner.get_or_intern_static("a", utf16!("a")))
                                    .into(),
                            )),
                        ))]
                        .into(),
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks a three-parameter arrow function assigned to a `let` binding.
#[test]
fn check_arrow_assignment_3arg() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("c", utf16!("c")).into(), None),
            false,
        ),
    ]);
    // Three plain parameters: default flags, length 3.
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 3);
    check_parser(
        "let foo = (a, b, c) => { return a };",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                Identifier::new(interner.get_or_intern_static("foo", utf16!("foo"))),
                Some(
                    ArrowFunction::new(
                        Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
                        params,
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(
                                Identifier::new(interner.get_or_intern_static("a", utf16!("a")))
                                    .into(),
                            )),
                        ))]
                        .into(),
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
/// Checks a three-parameter arrow function with a concise body assigned to a
/// `let` binding.
#[test]
fn check_arrow_assignment_3arg_nobrackets() {
    let interner = &mut Interner::default();
    let params = FormalParameterList::from(vec![
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("a", utf16!("a")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("b", utf16!("b")).into(), None),
            false,
        ),
        FormalParameter::new(
            Variable::from_identifier(interner.get_or_intern_static("c", utf16!("c")).into(), None),
            false,
        ),
    ]);
    assert_eq!(params.flags(), FormalParameterListFlags::default());
    assert_eq!(params.length(), 3);
    check_parser(
        "let foo = (a, b, c) => a;",
        vec![Declaration::Lexical(LexicalDeclaration::Let(
            vec![Variable::from_identifier(
                Identifier::new(interner.get_or_intern_static("foo", utf16!("foo"))),
                Some(
                    // Concise body `a` desugars to a single `return a`.
                    ArrowFunction::new(
                        Some(interner.get_or_intern_static("foo", utf16!("foo")).into()),
                        params,
                        vec![StatementListItem::Statement(Statement::Return(
                            Return::new(Some(
                                Identifier::new(interner.get_or_intern_static("a", utf16!("a")))
                                    .into(),
                            )),
                        ))]
                        .into(),
                    )
                    .into(),
                ),
            )]
            .try_into()
            .unwrap(),
        ))
        .into()],
        interner,
    );
}
|
||||
359
javascript-engine/external/boa/boa_parser/src/parser/mod.rs
vendored
Normal file
359
javascript-engine/external/boa/boa_parser/src/parser/mod.rs
vendored
Normal file
@@ -0,0 +1,359 @@
|
||||
//! Boa parser implementation.
|
||||
|
||||
mod cursor;
|
||||
mod expression;
|
||||
mod statement;
|
||||
|
||||
pub(crate) mod function;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
error::ParseResult,
|
||||
parser::{
|
||||
cursor::Cursor,
|
||||
function::{FormalParameters, FunctionStatementList},
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::Identifier,
|
||||
function::FormalParameterList,
|
||||
operations::{
|
||||
contains, top_level_lexically_declared_names, top_level_var_declared_names, ContainsSymbol,
|
||||
},
|
||||
Position, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use rustc_hash::FxHashSet;
|
||||
use std::io::Read;
|
||||
|
||||
/// Trait implemented by parsers.
///
/// This makes it possible to abstract over the underlying implementation of a parser.
/// Each grammar production is modelled as a small type implementing this trait, and
/// parsing consumes the parser value (`self` by value).
trait TokenParser<R>: Sized
where
    R: Read,
{
    /// Output type for the parser.
    type Output; // = Node; waiting for https://github.com/rust-lang/rust/issues/29661

    /// Parses the token stream using the current parser.
    ///
    /// This method needs to be provided by the implementor type.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output>;
}
|
||||
|
||||
// Newtype wrappers over `bool` for the grammar's parse parameters ([Yield],
// [Await], [In], [Return], [Default]). Using distinct types instead of bare
// `bool`s prevents accidentally swapping flags at parser-construction sites.

/// Boolean representing if the parser should allow a `yield` keyword.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct AllowYield(bool);

impl From<bool> for AllowYield {
    fn from(allow: bool) -> Self {
        Self(allow)
    }
}

/// Boolean representing if the parser should allow an `await` keyword.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct AllowAwait(bool);

impl From<bool> for AllowAwait {
    fn from(allow: bool) -> Self {
        Self(allow)
    }
}

/// Boolean representing if the parser should allow an `in` keyword.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct AllowIn(bool);

impl From<bool> for AllowIn {
    fn from(allow: bool) -> Self {
        Self(allow)
    }
}

/// Boolean representing if the parser should allow a `return` keyword.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct AllowReturn(bool);

impl From<bool> for AllowReturn {
    fn from(allow: bool) -> Self {
        Self(allow)
    }
}

/// Boolean representing if the parser should allow a `default` keyword.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct AllowDefault(bool);

impl From<bool> for AllowDefault {
    fn from(allow: bool) -> Self {
        Self(allow)
    }
}
|
||||
|
||||
/// Parser for the ECMAScript language.
///
/// This parser implementation tries to be conformant to the most recent
/// [ECMAScript language specification][spec], and it also implements some legacy features like
/// [labelled functions][label] or [duplicated block-level function definitions][block].
///
/// [spec]: https://tc39.es/ecma262/#sec-ecmascript-language-source-code
/// [label]: https://tc39.es/ecma262/#sec-labelled-function-declarations
/// [block]: https://tc39.es/ecma262/#sec-block-duplicates-allowed-static-semantics
#[derive(Debug)]
pub struct Parser<R> {
    /// Cursor of the parser, pointing to the lexer and used to get tokens for the parser.
    cursor: Cursor<R>,
}

impl<R> Parser<R> {
    /// Create a new `Parser` with a reader as the input to parse.
    pub fn new(reader: R) -> Self
    where
        R: Read,
    {
        Self {
            cursor: Cursor::new(reader),
        }
    }

    /// Set the parser strict mode to true.
    pub fn set_strict(&mut self)
    where
        R: Read,
    {
        self.cursor.set_strict_mode(true);
    }

    /// Set the parser JSON-parse mode to true.
    // NOTE(review): doc previously said "strict mode" — copy-paste from
    // `set_strict`; this method toggles the cursor's JSON-parse flag.
    pub fn set_json_parse(&mut self)
    where
        R: Read,
    {
        self.cursor.set_json_parse(true);
    }

    /// Parse the full input as a [ECMAScript Script][spec] into the boa AST representation.
    /// The resulting `StatementList` can be compiled into boa bytecode and executed in the boa vm.
    ///
    /// # Errors
    ///
    /// Will return `Err` on any parsing error, including invalid reads of the bytes being parsed.
    ///
    /// [spec]: https://tc39.es/ecma262/#prod-Script
    pub fn parse_all(&mut self, interner: &mut Interner) -> ParseResult<StatementList>
    where
        R: Read,
    {
        Script::new(false).parse(&mut self.cursor, interner)
    }

    /// [`19.2.1.1 PerformEval ( x, strictCaller, direct )`][spec]
    ///
    /// Parses the source text input of an `eval` call.
    ///
    /// # Errors
    ///
    /// Will return `Err` on any parsing error, including invalid reads of the bytes being parsed.
    ///
    /// [spec]: https://tc39.es/ecma262/#sec-performeval
    pub fn parse_eval(
        &mut self,
        direct: bool,
        interner: &mut Interner,
    ) -> ParseResult<StatementList>
    where
        R: Read,
    {
        // `direct` relaxes the `super`/`new.target` early errors (see `ScriptBody`).
        Script::new(direct).parse(&mut self.cursor, interner)
    }

    /// Parses the full input as an [ECMAScript `FunctionBody`][spec] into the boa AST representation.
    ///
    /// # Errors
    ///
    /// Will return `Err` on any parsing error, including invalid reads of the bytes being parsed.
    ///
    /// [spec]: https://tc39.es/ecma262/#prod-FunctionBody
    pub fn parse_function_body(
        &mut self,
        interner: &mut Interner,
        allow_yield: bool,
        allow_await: bool,
    ) -> ParseResult<StatementList>
    where
        R: Read,
    {
        FunctionStatementList::new(allow_yield, allow_await).parse(&mut self.cursor, interner)
    }

    /// Parses the full input as an [ECMAScript `FormalParameterList`][spec] into the boa AST representation.
    ///
    /// # Errors
    ///
    /// Will return `Err` on any parsing error, including invalid reads of the bytes being parsed.
    ///
    /// [spec]: https://tc39.es/ecma262/#prod-FormalParameterList
    pub fn parse_formal_parameters(
        &mut self,
        interner: &mut Interner,
        allow_yield: bool,
        allow_await: bool,
    ) -> ParseResult<FormalParameterList>
    where
        R: Read,
    {
        FormalParameters::new(allow_yield, allow_await).parse(&mut self.cursor, interner)
    }
}
|
||||
|
||||
/// Parses a full script.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-Script
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct Script {
|
||||
direct_eval: bool,
|
||||
}
|
||||
|
||||
impl Script {
|
||||
/// Create a new `Script` parser.
|
||||
#[inline]
|
||||
const fn new(direct_eval: bool) -> Self {
|
||||
Self { direct_eval }
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for Script
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = StatementList;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let statement_list = ScriptBody::new(true, cursor.strict_mode(), self.direct_eval)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
// It is a Syntax Error if the LexicallyDeclaredNames of ScriptBody contains any duplicate entries.
|
||||
// It is a Syntax Error if any element of the LexicallyDeclaredNames of ScriptBody also occurs in the VarDeclaredNames of ScriptBody.
|
||||
let mut lexical_names = FxHashSet::default();
|
||||
for name in top_level_lexically_declared_names(&statement_list) {
|
||||
if !lexical_names.insert(name) {
|
||||
return Err(Error::general(
|
||||
"lexical name declared multiple times",
|
||||
Position::new(1, 1),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
for name in top_level_var_declared_names(&statement_list) {
|
||||
if lexical_names.contains(&name) {
|
||||
return Err(Error::general(
|
||||
"lexical name declared multiple times",
|
||||
Position::new(1, 1),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(statement_list)
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a script body.
///
/// More information:
///  - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-ScriptBody
#[derive(Debug, Clone, Copy)]
pub struct ScriptBody {
    // Whether directive prologues (e.g. `"use strict"`) are processed.
    directive_prologues: bool,
    // Whether the body is parsed in strict mode.
    strict: bool,
    // `true` when this body is the source of a direct `eval` call; this
    // suppresses the `super`/`new.target` early errors below.
    direct_eval: bool,
}

impl ScriptBody {
    /// Create a new `ScriptBody` parser.
    #[inline]
    const fn new(directive_prologues: bool, strict: bool, direct_eval: bool) -> Self {
        Self {
            directive_prologues,
            strict,
            direct_eval,
        }
    }
}

impl<R> TokenParser<R> for ScriptBody
where
    R: Read,
{
    type Output = StatementList;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let body = self::statement::StatementList::new(
            false,
            false,
            false,
            &[],
            self.directive_prologues,
            self.strict,
        )
        .parse(cursor, interner)?;

        if !self.direct_eval {
            // It is a Syntax Error if StatementList Contains super unless the source text containing super is eval
            // code that is being processed by a direct eval.
            // Additional early error rules for super within direct eval are defined in 19.2.1.1.
            if contains(&body, ContainsSymbol::Super) {
                return Err(Error::general("invalid super usage", Position::new(1, 1)));
            }
            // It is a Syntax Error if StatementList Contains NewTarget unless the source text containing NewTarget
            // is eval code that is being processed by a direct eval.
            // Additional early error rules for NewTarget in direct eval are defined in 19.2.1.1.
            if contains(&body, ContainsSymbol::NewTarget) {
                return Err(Error::general(
                    "invalid new.target usage",
                    Position::new(1, 1),
                ));
            }
        }

        Ok(body)
    }
}
|
||||
|
||||
/// Helper to check if any parameter names are declared in the given list.
|
||||
fn name_in_lexically_declared_names(
|
||||
bound_names: &[Identifier],
|
||||
lexical_names: &[Identifier],
|
||||
position: Position,
|
||||
) -> ParseResult<()> {
|
||||
for name in bound_names {
|
||||
if lexical_names.contains(name) {
|
||||
return Err(Error::General {
|
||||
message: "formal parameter declared in lexically declared names",
|
||||
position,
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Trait to reduce boilerplate in the parser.
trait OrAbrupt<T> {
    /// Will convert an `Ok(None)` to an [`Error::AbruptEnd`] or return the inner type if not.
    fn or_abrupt(self) -> ParseResult<T>;
}

impl<T> OrAbrupt<T> for ParseResult<Option<T>> {
    fn or_abrupt(self) -> ParseResult<T> {
        // Propagate an existing error via `?`, then treat a missing value
        // (end of input) as an abrupt end of the source.
        self?.ok_or(Error::AbruptEnd)
    }
}
|
||||
131
javascript-engine/external/boa/boa_parser/src/parser/statement/block/mod.rs
vendored
Normal file
131
javascript-engine/external/boa/boa_parser/src/parser/statement/block/mod.rs
vendored
Normal file
@@ -0,0 +1,131 @@
|
||||
//! Block statement parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/block
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-block
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
statement::StatementList, AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt,
|
||||
ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
operations::{lexically_declared_names_legacy, var_declared_names},
|
||||
statement, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::io::Read;
|
||||
|
||||
/// The possible `TokenKind` which indicate the end of a block statement.
/// Only `}` terminates a block; the statement-list parser stops on it.
const BLOCK_BREAK_TOKENS: [TokenKind; 1] = [TokenKind::Punctuator(Punctuator::CloseBlock)];

/// A `BlockStatement` is equivalent to a `Block`.
///
/// More information:
///  - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-BlockStatement
pub(super) type BlockStatement = Block;
|
||||
|
||||
/// Block statement parsing.
// NOTE(review): the previous doc said "Variable declaration list parsing" —
// a copy-paste error; this type parses a `{ ... }` block.
///
/// More information:
///  - [MDN documentation][mdn]
///  - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/block
/// [spec]: https://tc39.es/ecma262/#prod-Block
#[derive(Debug, Clone, Copy)]
pub(super) struct Block {
    // The grammar's [Yield] parse parameter.
    allow_yield: AllowYield,
    // The grammar's [Await] parse parameter.
    allow_await: AllowAwait,
    // The grammar's [Return] parse parameter.
    allow_return: AllowReturn,
}
|
||||
|
||||
impl Block {
    /// Creates a new `Block` parser.
    ///
    /// The flags accept anything convertible into the corresponding newtype
    /// (`bool` in practice), so call sites can pass plain booleans.
    pub(super) fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
    where
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
        R: Into<AllowReturn>,
    {
        Self {
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
            allow_return: allow_return.into(),
        }
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for Block
where
    R: Read,
{
    type Output = statement::Block;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("Block", "Parsing");
        cursor.expect(Punctuator::OpenBlock, "block", interner)?;
        // Fast path: `{}` — consume the closing brace and return an empty block.
        if let Some(tk) = cursor.peek(0, interner)? {
            if tk.kind() == &TokenKind::Punctuator(Punctuator::CloseBlock) {
                cursor.advance(interner);
                return Ok(statement::Block::from(vec![]));
            }
        }
        // Remember where the statement list starts so early errors can point at it.
        let position = cursor.peek(0, interner).or_abrupt()?.span().start();
        let statement_list = StatementList::new(
            self.allow_yield,
            self.allow_await,
            self.allow_return,
            &BLOCK_BREAK_TOKENS,
            false,
            false,
        )
        .parse(cursor, interner)
        .map(statement::Block::from)?;
        cursor.expect(Punctuator::CloseBlock, "block", interner)?;

        // It is a Syntax Error if the LexicallyDeclaredNames of StatementList contains any duplicate
        // entries, unless the source text matched by this production is not strict mode code and the
        // duplicate entries are only bound by FunctionDeclarations.
        let mut lexical_names = FxHashMap::default();
        for (name, is_fn) in lexically_declared_names_legacy(&statement_list) {
            if let Some(is_fn_previous) = lexical_names.insert(name, is_fn) {
                match (cursor.strict_mode(), is_fn, is_fn_previous) {
                    // Legacy allowance: duplicate *function* declarations in
                    // sloppy mode are permitted.
                    (false, true, true) => {}
                    _ => {
                        return Err(Error::general(
                            "lexical name declared multiple times",
                            position,
                        ));
                    }
                }
            }
        }

        // It is a Syntax Error if any element of the LexicallyDeclaredNames of StatementList also
        // occurs in the VarDeclaredNames of StatementList.
        for name in var_declared_names(&statement_list) {
            if lexical_names.contains_key(&name) {
                return Err(Error::general(
                    "lexical name declared in var names",
                    position,
                ));
            }
        }

        Ok(statement_list)
    }
}
|
||||
177
javascript-engine/external/boa/boa_parser/src/parser/statement/block/tests.rs
vendored
Normal file
177
javascript-engine/external/boa/boa_parser/src/parser/statement/block/tests.rs
vendored
Normal file
@@ -0,0 +1,177 @@
|
||||
//! Block statement parsing tests.
|
||||
|
||||
use std::convert::TryInto;
|
||||
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
declaration::{VarDeclaration, Variable},
|
||||
expression::{
|
||||
literal::Literal,
|
||||
operator::{assign::AssignOp, unary::UnaryOp, Assign, Unary},
|
||||
Call, Identifier,
|
||||
},
|
||||
function::{FormalParameterList, Function},
|
||||
statement::{Block, Return},
|
||||
Declaration, Expression, Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Helper function to check a block.
|
||||
#[track_caller]
|
||||
fn check_block<B>(js: &str, block: B, interner: &mut Interner)
|
||||
where
|
||||
B: Into<Box<[StatementListItem]>>,
|
||||
{
|
||||
check_parser(
|
||||
js,
|
||||
vec![Statement::Block(Block::from(block.into())).into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
/// An empty source block `{}` parses to an empty AST block.
#[test]
fn empty() {
    let interner = &mut Interner::default();
    check_block("{}", vec![], interner);
}
|
||||
|
||||
/// A block with a `var` declaration followed by a postfix increment parses to
/// the corresponding two-item statement list; likewise for a block that also
/// declares and calls a function.
#[test]
fn non_empty() {
    let interner = &mut Interner::default();
    let a = interner.get_or_intern_static("a", utf16!("a"));
    check_block(
        r"{
            var a = 10;
            a++;
        }",
        vec![
            Statement::Var(VarDeclaration(
                vec![Variable::from_identifier(
                    a.into(),
                    Some(Literal::from(10).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            Statement::Expression(Expression::from(Unary::new(
                UnaryOp::IncrementPost,
                Identifier::new(a).into(),
            )))
            .into(),
        ],
        interner,
    );

    // Same shape, but the initializer is a call to a function declared in the
    // same block.
    let interner = &mut Interner::default();
    let hello = interner.get_or_intern_static("hello", utf16!("hello"));
    let a = interner.get_or_intern_static("a", utf16!("a"));
    check_block(
        r"{
            function hello() {
                return 10
            }

            var a = hello();
            a++;
        }",
        vec![
            Declaration::Function(Function::new(
                Some(hello.into()),
                FormalParameterList::default(),
                vec![StatementListItem::Statement(Statement::Return(
                    Return::new(Some(Literal::from(10).into())),
                ))]
                .into(),
            ))
            .into(),
            Statement::Var(VarDeclaration(
                vec![Variable::from_identifier(
                    a.into(),
                    Some(Call::new(Identifier::new(hello).into(), Box::default()).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            Statement::Expression(Expression::from(Unary::new(
                UnaryOp::IncrementPost,
                Identifier::new(a).into(),
            )))
            .into(),
        ],
        interner,
    );
}
|
||||
|
||||
/// Hoisting behavior: a function declared after its use still appears first in
/// the expected AST (hoisted), while a late `var` declaration keeps the
/// written statement order.
#[test]
fn hoisting() {
    let interner = &mut Interner::default();
    let hello = interner.get_or_intern_static("hello", utf16!("hello"));
    let a = interner.get_or_intern_static("a", utf16!("a"));
    check_block(
        r"{
            var a = hello();
            a++;

            function hello() { return 10 }
        }",
        vec![
            // The function declaration is listed first even though it appears
            // last in the source.
            Declaration::Function(Function::new(
                Some(hello.into()),
                FormalParameterList::default(),
                vec![StatementListItem::Statement(Statement::Return(
                    Return::new(Some(Literal::from(10).into())),
                ))]
                .into(),
            ))
            .into(),
            Statement::Var(VarDeclaration(
                vec![Variable::from_identifier(
                    a.into(),
                    Some(Call::new(Identifier::new(hello).into(), Box::default()).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            Statement::Expression(Expression::from(Unary::new(
                UnaryOp::IncrementPost,
                Identifier::new(a).into(),
            )))
            .into(),
        ],
        interner,
    );

    // A `var` declaration after the variable's use parses in written order.
    let interner = &mut Interner::default();
    let a = interner.get_or_intern_static("a", utf16!("a"));
    check_block(
        r"{
            a = 10;
            a++;

            var a;
        }",
        vec![
            Statement::Expression(Expression::from(Assign::new(
                AssignOp::Assign,
                Identifier::new(a).into(),
                Literal::from(10).into(),
            )))
            .into(),
            Statement::Expression(Expression::from(Unary::new(
                UnaryOp::IncrementPost,
                Identifier::new(a).into(),
            )))
            .into(),
            Statement::Var(VarDeclaration(
                vec![Variable::from_identifier(a.into(), None)]
                    .try_into()
                    .unwrap(),
            ))
            .into(),
        ],
        interner,
    );
}
|
||||
81
javascript-engine/external/boa/boa_parser/src/parser/statement/break_stm/mod.rs
vendored
Normal file
81
javascript-engine/external/boa/boa_parser/src/parser/statement/break_stm/mod.rs
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
//! Break expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/break
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-break-statement
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::{Token, TokenKind},
|
||||
parser::{
|
||||
cursor::{Cursor, SemicolonResult},
|
||||
expression::LabelIdentifier,
|
||||
AllowAwait, AllowYield, ParseResult, TokenParser,
|
||||
},
|
||||
};
|
||||
use boa_ast::{statement::Break, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Break statement parsing.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/break
/// [spec]: https://tc39.es/ecma262/#prod-BreakStatement
#[derive(Debug, Clone, Copy)]
pub(super) struct BreakStatement {
    // Whether `yield` is permitted as a label identifier in this context.
    allow_yield: AllowYield,
    // Whether `await` is permitted as a label identifier in this context.
    allow_await: AllowAwait,
}

impl BreakStatement {
    /// Creates a new `BreakStatement` parser.
    pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
    where
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
    {
        Self {
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
        }
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for BreakStatement
where
    R: Read,
{
    type Output = Break;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("BreakStatement", "Parsing");
        cursor.expect((Keyword::Break, false), "break statement", interner)?;

        let label = if let SemicolonResult::Found(tok) = cursor.peek_semicolon(interner)? {
            // The statement ends here (explicit `;`, ASI, or end of input):
            // there is no label. An explicit semicolon is consumed.
            // NOTE(review): unlike `ContinueStatement::parse`, this does not
            // consume a semicolon that follows a line terminator — confirm
            // whether the asymmetry is intended.
            if tok.map(Token::kind) == Some(&TokenKind::Punctuator(Punctuator::Semicolon)) {
                cursor.advance(interner);
            }

            None
        } else {
            // `break <label>`: parse the label identifier and require a
            // terminating semicolon (which may be inserted by ASI).
            let label = LabelIdentifier::new(self.allow_yield, self.allow_await)
                .parse(cursor, interner)?
                .sym();
            cursor.expect_semicolon("break statement", interner)?;

            Some(label)
        };

        Ok(Break::new(label))
    }
}
|
||||
180
javascript-engine/external/boa/boa_parser/src/parser/statement/break_stm/tests.rs
vendored
Normal file
180
javascript-engine/external/boa/boa_parser/src/parser/statement/break_stm/tests.rs
vendored
Normal file
@@ -0,0 +1,180 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
expression::literal::Literal,
|
||||
statement::{Block, Break, WhileLoop},
|
||||
Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// `break` used inline as the body of a `while` loop.
#[test]
fn inline() {
    check_parser(
        "while (true) break;",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Break::new(None).into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// `break` on its own line as the body of a `while` loop.
#[test]
fn new_line() {
    check_parser(
        "while (true)
        break;",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Break::new(None).into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// ASI inserts the missing semicolon in `{break}`.
#[test]
fn inline_block_semicolon_insertion() {
    check_parser(
        "while (true) {break}",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(None),
            ))])
            .into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// ASI applies to a labelled `break` followed only by a newline.
#[test]
fn new_line_semicolon_insertion() {
    let interner = &mut Interner::default();
    check_parser(
        "while (true) {
            break test
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(Some(interner.get_or_intern_static("test", utf16!("test")))),
            ))])
            .into(),
        ))
        .into()],
        interner,
    );
}

/// `break;` with an explicit semicolon inside an inline block.
#[test]
fn inline_block() {
    check_parser(
        "while (true) {break;}",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(None),
            ))])
            .into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// Labelled `break` with an explicit semicolon inside a multi-line block.
#[test]
fn new_line_block() {
    let interner = &mut Interner::default();
    check_parser(
        "while (true) {
            break test;
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(Some(interner.get_or_intern_static("test", utf16!("test")))),
            ))])
            .into(),
        ))
        .into()],
        interner,
    );
}

/// `await` and `yield` are accepted as break labels in this (non-async,
/// non-generator) context.
#[test]
fn reserved_label() {
    let interner = &mut Interner::default();
    check_parser(
        "while (true) {
            break await;
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(Some(
                    interner.get_or_intern_static("await", utf16!("await")),
                )),
            ))])
            .into(),
        ))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "while (true) {
            break yield;
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(Some(
                    interner.get_or_intern_static("yield", utf16!("yield")),
                )),
            ))])
            .into(),
        ))
        .into()],
        interner,
    );
}

/// Unlabelled `break;` on its own line inside a block.
#[test]
fn new_line_block_empty() {
    check_parser(
        "while (true) {
            break;
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(None),
            ))])
            .into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// ASI handles `break` followed only by a newline and the closing brace.
#[test]
fn new_line_block_empty_semicolon_insertion() {
    check_parser(
        "while (true) {
            break
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(None),
            ))])
            .into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}
|
||||
89
javascript-engine/external/boa/boa_parser/src/parser/statement/continue_stm/mod.rs
vendored
Normal file
89
javascript-engine/external/boa/boa_parser/src/parser/statement/continue_stm/mod.rs
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
//! Continue expression parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/continue
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-continue-statement
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
cursor::{Cursor, SemicolonResult},
|
||||
expression::LabelIdentifier,
|
||||
AllowAwait, AllowYield, ParseResult, TokenParser,
|
||||
},
|
||||
};
|
||||
use boa_ast::{statement::Continue, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Continue statement parsing.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/continue
/// [spec]: https://tc39.es/ecma262/#prod-ContinueStatement
#[derive(Debug, Clone, Copy)]
pub(super) struct ContinueStatement {
    // Whether `yield` is permitted as a label identifier in this context.
    allow_yield: AllowYield,
    // Whether `await` is permitted as a label identifier in this context.
    allow_await: AllowAwait,
}

impl ContinueStatement {
    /// Creates a new `ContinueStatement` parser.
    pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
    where
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
    {
        Self {
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
        }
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for ContinueStatement
where
    R: Read,
{
    type Output = Continue;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ContinueStatement", "Parsing");
        cursor.expect((Keyword::Continue, false), "continue statement", interner)?;

        let label = if let SemicolonResult::Found(tok) = cursor.peek_semicolon(interner)? {
            // The statement ends here: there is no label. Consume an explicit
            // semicolon, including one that appears right after a line
            // terminator.
            if let Some(token) = tok {
                if token.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) {
                    cursor.advance(interner);
                } else if token.kind() == &TokenKind::LineTerminator {
                    if let Some(token) = cursor.peek(0, interner)? {
                        if token.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) {
                            cursor.advance(interner);
                        }
                    }
                }
            }

            None
        } else {
            // `continue <label>`: parse the label identifier and require a
            // terminating semicolon (which may be inserted by ASI).
            let label = LabelIdentifier::new(self.allow_yield, self.allow_await)
                .parse(cursor, interner)?
                .sym();
            cursor.expect_semicolon("continue statement", interner)?;

            Some(label)
        };

        Ok(Continue::new(label))
    }
}
|
||||
180
javascript-engine/external/boa/boa_parser/src/parser/statement/continue_stm/tests.rs
vendored
Normal file
180
javascript-engine/external/boa/boa_parser/src/parser/statement/continue_stm/tests.rs
vendored
Normal file
@@ -0,0 +1,180 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
expression::literal::Literal,
|
||||
statement::{Block, Continue, WhileLoop},
|
||||
Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// `continue` used inline as the body of a `while` loop.
#[test]
fn inline() {
    check_parser(
        "while (true) continue;",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Continue::new(None).into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// `continue` on its own line as the body of a `while` loop.
#[test]
fn new_line() {
    check_parser(
        "while (true)
        continue;",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Continue::new(None).into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// ASI inserts the missing semicolon in `{continue}`.
#[test]
fn inline_block_semicolon_insertion() {
    check_parser(
        "while (true) {continue}",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Continue(
                Continue::new(None),
            ))])
            .into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// ASI applies to a labelled `continue` followed only by a newline.
#[test]
fn new_line_semicolon_insertion() {
    let interner = &mut Interner::default();
    check_parser(
        "while (true) {
            continue test
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Continue(
                Continue::new(Some(interner.get_or_intern_static("test", utf16!("test")))),
            ))])
            .into(),
        ))
        .into()],
        interner,
    );
}

/// `continue;` with an explicit semicolon inside an inline block.
#[test]
fn inline_block() {
    check_parser(
        "while (true) {continue;}",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Continue(
                Continue::new(None),
            ))])
            .into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// Labelled `continue` with an explicit semicolon inside a multi-line block.
#[test]
fn new_line_block() {
    let interner = &mut Interner::default();
    check_parser(
        "while (true) {
            continue test;
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Continue(
                Continue::new(Some(interner.get_or_intern_static("test", utf16!("test")))),
            ))])
            .into(),
        ))
        .into()],
        interner,
    );
}

/// `await` and `yield` are accepted as continue labels in this (non-async,
/// non-generator) context.
#[test]
fn reserved_label() {
    let interner = &mut Interner::default();
    check_parser(
        "while (true) {
            continue await;
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Continue(
                Continue::new(Some(
                    interner.get_or_intern_static("await", utf16!("await")),
                )),
            ))])
            .into(),
        ))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "while (true) {
            continue yield;
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Continue(
                Continue::new(Some(
                    interner.get_or_intern_static("yield", utf16!("yield")),
                )),
            ))])
            .into(),
        ))
        .into()],
        interner,
    );
}

/// Unlabelled `continue;` on its own line inside a block.
#[test]
fn new_line_block_empty() {
    check_parser(
        "while (true) {
            continue;
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Continue(
                Continue::new(None),
            ))])
            .into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}

/// ASI handles `continue` followed only by a newline and the closing brace.
#[test]
fn new_line_block_empty_semicolon_insertion() {
    check_parser(
        "while (true) {
            continue
        }",
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Block::from(vec![StatementListItem::Statement(Statement::Continue(
                Continue::new(None),
            ))])
            .into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}
|
||||
@@ -0,0 +1,98 @@
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::parser::{
|
||||
statement::declaration::hoistable::{parse_callable_declaration, CallableDeclaration},
|
||||
AllowAwait, AllowDefault, AllowYield, Cursor, ParseResult, TokenParser,
|
||||
};
|
||||
use boa_ast::{function::AsyncFunction, Keyword};
|
||||
use boa_interner::Interner;
|
||||
use std::io::Read;
|
||||
|
||||
/// Async Function declaration parsing.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
/// [spec]: https://tc39.es/ecma262/#prod-AsyncFunctionDeclaration
#[derive(Debug, Clone, Copy)]
pub(super) struct AsyncFunctionDeclaration {
    // Whether `yield` is allowed in the enclosing context (affects the name).
    allow_yield: AllowYield,
    // Whether `await` is allowed in the enclosing context (affects the name).
    allow_await: AllowAwait,
    // Whether this declaration appears as an `export default` declaration.
    is_default: AllowDefault,
}

impl AsyncFunctionDeclaration {
    /// Creates a new `AsyncFunctionDeclaration` parser.
    pub(super) fn new<Y, A, D>(allow_yield: Y, allow_await: A, is_default: D) -> Self
    where
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
        D: Into<AllowDefault>,
    {
        Self {
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
            is_default: is_default.into(),
        }
    }
}
|
||||
|
||||
impl CallableDeclaration for AsyncFunctionDeclaration {
    fn error_context(&self) -> &'static str {
        "async function declaration"
    }

    fn is_default(&self) -> bool {
        self.is_default.0
    }

    // The binding name is read in the enclosing context, so it inherits the
    // surrounding [Yield]/[Await] flags.
    fn name_allow_yield(&self) -> bool {
        self.allow_yield.0
    }

    fn name_allow_await(&self) -> bool {
        self.allow_await.0
    }

    // Parameters and body of an async function are parsed with `await`
    // enabled and `yield` disabled.
    fn parameters_allow_yield(&self) -> bool {
        false
    }

    fn parameters_allow_await(&self) -> bool {
        true
    }

    fn body_allow_yield(&self) -> bool {
        false
    }

    fn body_allow_await(&self) -> bool {
        true
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for AsyncFunctionDeclaration
where
    R: Read,
{
    type Output = AsyncFunction;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        // `async function name(params) { body }`: no line terminator is
        // allowed between `async` and `function`.
        cursor.expect(
            (Keyword::Async, false),
            "async function declaration",
            interner,
        )?;
        cursor.peek_expect_no_lineterminator(0, "async function declaration", interner)?;
        cursor.expect(
            (Keyword::Function, false),
            "async function declaration",
            interner,
        )?;

        // (name, parameters, body) of the declared function.
        let result = parse_callable_declaration(&self, cursor, interner)?;

        Ok(AsyncFunction::new(
            Some(result.0),
            result.1,
            result.2,
            false,
        ))
    }
}
|
||||
@@ -0,0 +1,66 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
function::{AsyncFunction, FormalParameterList},
|
||||
Declaration, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Async function declaration parsing.
#[test]
fn async_function_declaration() {
    let interner = &mut Interner::default();
    check_parser(
        "async function hello() {}",
        vec![Declaration::AsyncFunction(AsyncFunction::new(
            Some(
                interner
                    .get_or_intern_static("hello", utf16!("hello"))
                    .into(),
            ),
            FormalParameterList::default(),
            StatementList::default(),
            false,
        ))
        .into()],
        interner,
    );
}

/// Async function declaration parsing with keywords: `yield` and `await` are
/// valid names for an async function declared in a non-generator, non-async
/// context.
#[test]
fn async_function_declaration_keywords() {
    let interner = &mut Interner::default();
    check_parser(
        "async function yield() {}",
        vec![Declaration::AsyncFunction(AsyncFunction::new(
            Some(
                interner
                    .get_or_intern_static("yield", utf16!("yield"))
                    .into(),
            ),
            FormalParameterList::default(),
            StatementList::default(),
            false,
        ))
        .into()],
        interner,
    );

    let interner = &mut Interner::default();
    check_parser(
        "async function await() {}",
        vec![Declaration::AsyncFunction(AsyncFunction::new(
            Some(
                interner
                    .get_or_intern_static("await", utf16!("await"))
                    .into(),
            ),
            FormalParameterList::default(),
            StatementList::default(),
            false,
        ))
        .into()],
        interner,
    );
}
|
||||
@@ -0,0 +1,109 @@
|
||||
//! Async Generator Declaration parsing
|
||||
//!
|
||||
//! Implements `TokenParser` for `AsyncGeneratorDeclaration` and outputs an `AsyncGeneratorDecl`
|
||||
//! ast node.
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::parser::{
|
||||
statement::declaration::hoistable::{parse_callable_declaration, CallableDeclaration},
|
||||
AllowAwait, AllowDefault, AllowYield, Cursor, ParseResult, TokenParser,
|
||||
};
|
||||
use boa_ast::{function::AsyncGenerator, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use std::io::Read;
|
||||
|
||||
/// Async Generator Declaration Parser.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-AsyncGeneratorDeclaration
#[derive(Debug, Clone, Copy)]
pub(super) struct AsyncGeneratorDeclaration {
    // Whether `yield` is allowed in the enclosing context (affects the name).
    allow_yield: AllowYield,
    // Whether `await` is allowed in the enclosing context (affects the name).
    allow_await: AllowAwait,
    // Whether this declaration appears as an `export default` declaration.
    is_default: AllowDefault,
}

impl AsyncGeneratorDeclaration {
    /// Creates a new `AsyncGeneratorDeclaration` parser.
    pub(super) fn new<Y, A, D>(allow_yield: Y, allow_await: A, is_default: D) -> Self
    where
        Y: Into<AllowYield>,
        A: Into<AllowAwait>,
        D: Into<AllowDefault>,
    {
        Self {
            allow_yield: allow_yield.into(),
            allow_await: allow_await.into(),
            is_default: is_default.into(),
        }
    }
}
|
||||
|
||||
impl CallableDeclaration for AsyncGeneratorDeclaration {
    fn error_context(&self) -> &'static str {
        "async generator declaration"
    }

    fn is_default(&self) -> bool {
        self.is_default.0
    }

    // The binding name is read in the enclosing context, so it inherits the
    // surrounding [Yield]/[Await] flags.
    fn name_allow_yield(&self) -> bool {
        self.allow_yield.0
    }

    fn name_allow_await(&self) -> bool {
        self.allow_await.0
    }

    // Parameters and body of an async generator are parsed with both `yield`
    // and `await` enabled.
    fn parameters_allow_yield(&self) -> bool {
        true
    }

    fn parameters_allow_await(&self) -> bool {
        true
    }

    fn body_allow_yield(&self) -> bool {
        true
    }

    fn body_allow_await(&self) -> bool {
        true
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for AsyncGeneratorDeclaration
where
    R: Read,
{
    type Output = AsyncGenerator;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        // `async function* name(params) { body }`: no line terminator is
        // allowed between `async` and `function`.
        cursor.expect(
            (Keyword::Async, false),
            "async generator declaration",
            interner,
        )?;
        cursor.peek_expect_no_lineterminator(0, "async generator declaration", interner)?;
        cursor.expect(
            (Keyword::Function, false),
            "async generator declaration",
            interner,
        )?;
        cursor.expect(Punctuator::Mul, "async generator declaration", interner)?;

        // (name, parameters, body) of the declared generator.
        let result = parse_callable_declaration(&self, cursor, interner)?;

        Ok(AsyncGenerator::new(
            Some(result.0),
            result.1,
            result.2,
            false,
        ))
    }
}
|
||||
@@ -0,0 +1,23 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
function::{AsyncGenerator, FormalParameterList},
|
||||
Declaration, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// An `async function*` declaration parses to an `AsyncGenerator` node with
/// the expected name and empty parameter list / body.
#[test]
fn async_generator_function_declaration() {
    let interner = &mut Interner::default();
    check_parser(
        "async function* gen() {}",
        vec![Declaration::AsyncGenerator(AsyncGenerator::new(
            Some(interner.get_or_intern_static("gen", utf16!("gen")).into()),
            FormalParameterList::default(),
            StatementList::default(),
            false,
        ))
        .into()],
        interner,
    );
}
|
||||
1360
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/hoistable/class_decl/mod.rs
vendored
Normal file
1360
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/hoistable/class_decl/mod.rs
vendored
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,92 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
expression::literal::Literal,
|
||||
function::{Class, ClassElement, FormalParameterList, Function},
|
||||
property::{MethodDefinition, PropertyName},
|
||||
Declaration, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// `async` is usable as an ordinary method name inside a class body.
#[test]
fn check_async_ordinary_method() {
    let interner = &mut Interner::default();

    let elements = vec![ClassElement::MethodDefinition(
        PropertyName::Literal(interner.get_or_intern_static("async", utf16!("async"))),
        MethodDefinition::Ordinary(Function::new(
            None,
            FormalParameterList::default(),
            StatementList::default(),
        )),
    )];

    check_parser(
        "class A {
            async() { }
        }
        ",
        [Declaration::Class(Class::new(
            Some(interner.get_or_intern_static("A", utf16!("A")).into()),
            None,
            None,
            elements.into(),
            true,
        ))
        .into()],
        interner,
    );
}

/// `async` is usable as a class field name with an initializer (the line
/// break before `= 1` must not terminate the field).
#[test]
fn check_async_field_initialization() {
    let interner = &mut Interner::default();

    let elements = vec![ClassElement::FieldDefinition(
        PropertyName::Literal(interner.get_or_intern_static("async", utf16!("async"))),
        Some(Literal::from(1).into()),
    )];

    check_parser(
        "class A {
            async
            = 1
        }
        ",
        [Declaration::Class(Class::new(
            Some(interner.get_or_intern_static("A", utf16!("A")).into()),
            None,
            None,
            elements.into(),
            true,
        ))
        .into()],
        interner,
    );
}

/// `async` alone on a line is an uninitialized class field, not the start of
/// an async method.
#[test]
fn check_async_field() {
    let interner = &mut Interner::default();

    let elements = vec![ClassElement::FieldDefinition(
        PropertyName::Literal(interner.get_or_intern_static("async", utf16!("async"))),
        None,
    )];

    check_parser(
        "class A {
            async
        }
        ",
        [Declaration::Class(Class::new(
            Some(interner.get_or_intern_static("A", utf16!("A")).into()),
            None,
            None,
            elements.into(),
            true,
        ))
        .into()],
        interner,
    );
}
|
||||
@@ -0,0 +1,84 @@
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::parser::{
|
||||
statement::declaration::hoistable::{parse_callable_declaration, CallableDeclaration},
|
||||
AllowAwait, AllowDefault, AllowYield, Cursor, ParseResult, TokenParser,
|
||||
};
|
||||
use boa_ast::{function::Function, Keyword};
|
||||
use boa_interner::Interner;
|
||||
use std::io::Read;
|
||||
|
||||
/// Function declaration parsing.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function
/// [spec]: https://tc39.es/ecma262/#prod-FunctionDeclaration
#[derive(Debug, Clone, Copy)]
pub(in crate::parser) struct FunctionDeclaration {
    // Whether `yield` may be used as the declaration's name.
    allow_yield: AllowYield,
    // Whether `await` may be used as the declaration's name.
    allow_await: AllowAwait,
    // Whether the declaration is in a `default` export position (name may be omitted).
    is_default: AllowDefault,
}
|
||||
|
||||
impl FunctionDeclaration {
|
||||
/// Creates a new `FunctionDeclaration` parser.
|
||||
pub(in crate::parser) fn new<Y, A, D>(allow_yield: Y, allow_await: A, is_default: D) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
D: Into<AllowDefault>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
is_default: is_default.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CallableDeclaration for FunctionDeclaration {
    fn error_context(&self) -> &'static str {
        "function declaration"
    }
    fn is_default(&self) -> bool {
        self.is_default.0
    }
    fn name_allow_yield(&self) -> bool {
        self.allow_yield.0
    }
    fn name_allow_await(&self) -> bool {
        self.allow_await.0
    }
    // A plain function is neither a generator nor async, so `yield` and
    // `await` carry no special meaning in its parameter list or body.
    fn parameters_allow_yield(&self) -> bool {
        false
    }
    fn parameters_allow_await(&self) -> bool {
        false
    }
    fn body_allow_yield(&self) -> bool {
        false
    }
    fn body_allow_await(&self) -> bool {
        false
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for FunctionDeclaration
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Function;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
cursor.expect((Keyword::Function, false), "function declaration", interner)?;
|
||||
|
||||
let result = parse_callable_declaration(&self, cursor, interner)?;
|
||||
|
||||
Ok(Function::new(Some(result.0), result.1, result.2))
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,86 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
function::{FormalParameterList, Function},
|
||||
Declaration, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Function declaration parsing.
#[test]
fn function_declaration() {
    let interner = &mut Interner::default();
    let hello = interner.get_or_intern_static("hello", utf16!("hello"));
    let expected = vec![Declaration::Function(Function::new(
        Some(hello.into()),
        FormalParameterList::default(),
        StatementList::default(),
    ))
    .into()];
    check_parser("function hello() {}", expected, interner);
}
|
||||
|
||||
/// Function declaration parsing with keywords.
#[test]
fn function_declaration_keywords() {
    // Builds the expected AST for `function <kw>() {}` and checks it in one
    // step; a macro is required because `utf16!` needs a literal argument.
    macro_rules! check_keyword {
        ($keyword:literal) => {{
            let interner = &mut Interner::default();
            let ast = vec![Declaration::Function(Function::new(
                Some(
                    interner
                        .get_or_intern_static($keyword, utf16!($keyword))
                        .into(),
                ),
                FormalParameterList::default(),
                StatementList::default(),
            ))
            .into()];
            check_parser(concat!("function ", $keyword, "() {}"), ast, interner);
        }};
    }

    check_keyword!("yield");
    check_keyword!("await");
    check_keyword!("as");
    check_keyword!("async");
    check_keyword!("from");
    check_keyword!("get");
    check_keyword!("meta");
    check_keyword!("of");
    check_keyword!("set");
    check_keyword!("target");
}
|
||||
@@ -0,0 +1,88 @@
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::parser::{
|
||||
statement::declaration::hoistable::{parse_callable_declaration, CallableDeclaration},
|
||||
AllowAwait, AllowDefault, AllowYield, Cursor, ParseResult, TokenParser,
|
||||
};
|
||||
use boa_ast::{function::Generator, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use std::io::Read;
|
||||
|
||||
/// Generator declaration parsing.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function*
/// [spec]: https://tc39.es/ecma262/#prod-GeneratorDeclaration
#[derive(Debug, Clone, Copy)]
pub(super) struct GeneratorDeclaration {
    // Whether `yield` may be used as the declaration's name.
    allow_yield: AllowYield,
    // Whether `await` may be used as the declaration's name.
    allow_await: AllowAwait,
    // Whether the declaration is in a `default` export position (name may be omitted).
    is_default: AllowDefault,
}
|
||||
|
||||
impl GeneratorDeclaration {
|
||||
/// Creates a new `GeneratorDeclaration` parser.
|
||||
pub(super) fn new<Y, A, D>(allow_yield: Y, allow_await: A, is_default: D) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
D: Into<AllowDefault>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
is_default: is_default.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CallableDeclaration for GeneratorDeclaration {
    fn error_context(&self) -> &'static str {
        "generator declaration"
    }
    fn is_default(&self) -> bool {
        self.is_default.0
    }
    fn name_allow_yield(&self) -> bool {
        self.allow_yield.0
    }
    fn name_allow_await(&self) -> bool {
        self.allow_await.0
    }
    // Inside a generator, `yield` is a keyword in both the parameter list
    // and the body; `await` remains an ordinary identifier (not async).
    fn parameters_allow_yield(&self) -> bool {
        true
    }
    fn parameters_allow_await(&self) -> bool {
        false
    }
    fn body_allow_yield(&self) -> bool {
        true
    }
    fn body_allow_await(&self) -> bool {
        false
    }
}
|
||||
|
||||
impl<R> TokenParser<R> for GeneratorDeclaration
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Generator;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
cursor.expect(
|
||||
(Keyword::Function, false),
|
||||
"generator declaration",
|
||||
interner,
|
||||
)?;
|
||||
cursor.expect(Punctuator::Mul, "generator declaration", interner)?;
|
||||
|
||||
let result = parse_callable_declaration(&self, cursor, interner)?;
|
||||
|
||||
Ok(Generator::new(Some(result.0), result.1, result.2, false))
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
function::{FormalParameterList, Generator},
|
||||
Declaration, StatementList,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks that a simple generator declaration parses into a `Generator` node.
#[test]
fn generator_function_declaration() {
    let interner = &mut Interner::default();
    let gen_name = interner.get_or_intern_static("gen", utf16!("gen"));
    let expected = vec![Declaration::Generator(Generator::new(
        Some(gen_name.into()),
        FormalParameterList::default(),
        StatementList::default(),
        false,
    ))
    .into()];
    check_parser("function* gen() {}", expected, interner);
}
|
||||
233
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/hoistable/mod.rs
vendored
Normal file
233
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/hoistable/mod.rs
vendored
Normal file
@@ -0,0 +1,233 @@
|
||||
//! Hoistable declaration parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#prod-HoistableDeclaration
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
mod async_function_decl;
|
||||
mod async_generator_decl;
|
||||
mod function_decl;
|
||||
mod generator_decl;
|
||||
|
||||
pub(crate) mod class_decl;
|
||||
|
||||
use self::{
|
||||
async_function_decl::AsyncFunctionDeclaration, async_generator_decl::AsyncGeneratorDeclaration,
|
||||
class_decl::ClassDeclaration, generator_decl::GeneratorDeclaration,
|
||||
};
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::BindingIdentifier,
|
||||
function::{FormalParameters, FunctionBody},
|
||||
name_in_lexically_declared_names,
|
||||
statement::LexError,
|
||||
AllowAwait, AllowDefault, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
expression::Identifier,
|
||||
function::FormalParameterList,
|
||||
operations::{bound_names, contains, top_level_lexically_declared_names, ContainsSymbol},
|
||||
Declaration, Keyword, Punctuator, StatementList,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
pub(in crate::parser) use function_decl::FunctionDeclaration;
|
||||
|
||||
/// Hoistable declaration parsing.
///
/// Dispatches to the function, generator, async function, async generator or
/// class declaration parser depending on the leading keyword(s).
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-FunctionDeclaration
#[derive(Debug, Clone, Copy)]
pub(super) struct HoistableDeclaration {
    // Forwarded to the concrete declaration parser selected in `parse`.
    allow_yield: AllowYield,
    allow_await: AllowAwait,
    is_default: AllowDefault,
}
|
||||
|
||||
impl HoistableDeclaration {
|
||||
/// Creates a new `HoistableDeclaration` parser.
|
||||
pub(super) fn new<Y, A, D>(allow_yield: Y, allow_await: A, is_default: D) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
D: Into<AllowDefault>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
is_default: is_default.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for HoistableDeclaration
where
    R: Read,
{
    type Output = Declaration;

    /// Dispatches to the concrete declaration parser based on the leading
    /// keyword: `function`, `function*`, `async function`, `async function*`
    /// or `class`.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("HoistableDeclaration", "Parsing");
        // Only peek; the sub-parsers expect the keyword to still be in the stream.
        let tok = cursor.peek(0, interner).or_abrupt()?;

        match tok.kind() {
            // The `true` flag marks a keyword containing escape sequences,
            // which is not allowed in this position.
            TokenKind::Keyword((Keyword::Function | Keyword::Async | Keyword::Class, true)) => {
                Err(Error::general(
                    "Keyword must not contain escaped characters",
                    tok.span().start(),
                ))
            }
            TokenKind::Keyword((Keyword::Function, false)) => {
                // A `*` right after `function` selects the generator grammar.
                let next_token = cursor.peek(1, interner).or_abrupt()?;
                if let TokenKind::Punctuator(Punctuator::Mul) = next_token.kind() {
                    GeneratorDeclaration::new(self.allow_yield, self.allow_await, self.is_default)
                        .parse(cursor, interner)
                        .map(Declaration::from)
                } else {
                    FunctionDeclaration::new(self.allow_yield, self.allow_await, self.is_default)
                        .parse(cursor, interner)
                        .map(Declaration::from)
                }
            }
            TokenKind::Keyword((Keyword::Async, false)) => {
                // Offset 2 looks past the `function` keyword that follows
                // `async`, checking for the `*` of `async function *`.
                let next_token = cursor.peek(2, interner).or_abrupt()?;
                if let TokenKind::Punctuator(Punctuator::Mul) = next_token.kind() {
                    AsyncGeneratorDeclaration::new(
                        self.allow_yield,
                        self.allow_await,
                        self.is_default,
                    )
                    .parse(cursor, interner)
                    .map(Declaration::from)
                } else {
                    // NOTE(review): unlike the function/generator branches this
                    // passes `false` instead of `self.is_default` — confirm
                    // this asymmetry is intentional.
                    AsyncFunctionDeclaration::new(self.allow_yield, self.allow_await, false)
                        .parse(cursor, interner)
                        .map(Declaration::from)
                }
            }
            TokenKind::Keyword((Keyword::Class, false)) => {
                // NOTE(review): also passes `false` rather than
                // `self.is_default` — verify against the export-default path.
                ClassDeclaration::new(self.allow_yield, self.allow_await, false)
                    .parse(cursor, interner)
                    .map(Declaration::from)
            }
            // Callers only dispatch here after seeing one of the keywords above.
            _ => unreachable!("unknown token found: {:?}", tok),
        }
    }
}
|
||||
|
||||
/// Shared configuration for the function-like declaration parsers.
///
/// Implementors describe how the name, parameter list and body of a specific
/// declaration kind (function, generator, async function, async generator)
/// must be parsed by `parse_callable_declaration`.
trait CallableDeclaration {
    /// Context string used in parse error messages for this declaration kind.
    fn error_context(&self) -> &'static str;
    /// Whether the declaration is in a `default` export position, where the
    /// name may be omitted.
    fn is_default(&self) -> bool;
    /// Whether `yield` is a valid binding name for this declaration.
    fn name_allow_yield(&self) -> bool;
    /// Whether `await` is a valid binding name for this declaration.
    fn name_allow_await(&self) -> bool;
    /// Whether `yield` expressions are allowed in the parameter list.
    fn parameters_allow_yield(&self) -> bool;
    /// Whether `await` expressions are allowed in the parameter list.
    fn parameters_allow_await(&self) -> bool;
    /// Whether `yield` expressions are allowed in the body.
    fn body_allow_yield(&self) -> bool;
    /// Whether `await` expressions are allowed in the body.
    fn body_allow_await(&self) -> bool;
}
|
||||
|
||||
// This is a helper function to not duplicate code in the individual callable declaration parsers.
//
// Parses the common `Name ( Parameters ) { Body }` tail of a function-like
// declaration, applies the shared early-error checks, and returns the parsed
// name, parameter list and body.
fn parse_callable_declaration<R: Read, C: CallableDeclaration>(
    c: &C,
    cursor: &mut Cursor<R>,
    interner: &mut Interner,
) -> ParseResult<(Identifier, FormalParameterList, StatementList)> {
    let token = cursor.peek(0, interner).or_abrupt()?;
    // Remember where the name starts for error reporting.
    let name_span = token.span();
    let name = match token.kind() {
        // A declaration starting directly with `(` has no name; that is only
        // valid in a `default` export position, where the name is "default".
        TokenKind::Punctuator(Punctuator::OpenParen) => {
            if !c.is_default() {
                return Err(Error::unexpected(
                    token.to_string(interner),
                    token.span(),
                    c.error_context(),
                ));
            }
            Sym::DEFAULT.into()
        }
        _ => BindingIdentifier::new(c.name_allow_yield(), c.name_allow_await())
            .parse(cursor, interner)?,
    };

    // Position just after `(`; used as the anchor for parameter-related errors.
    let params_start_position = cursor
        .expect(Punctuator::OpenParen, c.error_context(), interner)?
        .span()
        .end();

    let params = FormalParameters::new(c.parameters_allow_yield(), c.parameters_allow_await())
        .parse(cursor, interner)?;

    cursor.expect(Punctuator::CloseParen, c.error_context(), interner)?;
    cursor.expect(Punctuator::OpenBlock, c.error_context(), interner)?;

    let body =
        FunctionBody::new(c.body_allow_yield(), c.body_allow_await()).parse(cursor, interner)?;

    cursor.expect(Punctuator::CloseBlock, c.error_context(), interner)?;

    // Early Error: If the source code matching FormalParameters is strict mode code,
    // the Early Error rules for UniqueFormalParameters : FormalParameters are applied.
    if (cursor.strict_mode() || body.strict()) && params.has_duplicates() {
        return Err(Error::lex(LexError::Syntax(
            "Duplicate parameter name not allowed in this context".into(),
            params_start_position,
        )));
    }

    // Early Error: It is a Syntax Error if FunctionBodyContainsUseStrict of FunctionBody is true
    // and IsSimpleParameterList of FormalParameters is false.
    if body.strict() && !params.is_simple() {
        return Err(Error::lex(LexError::Syntax(
            "Illegal 'use strict' directive in function with non-simple parameter list".into(),
            params_start_position,
        )));
    }

    // Early Error: If BindingIdentifier is present and the source code matching BindingIdentifier is strict mode code,
    // it is a Syntax Error if the StringValue of BindingIdentifier is "eval" or "arguments".
    if (cursor.strict_mode() || body.strict()) && [Sym::EVAL, Sym::ARGUMENTS].contains(&name.sym())
    {
        return Err(Error::lex(LexError::Syntax(
            "unexpected identifier 'eval' or 'arguments' in strict mode".into(),
            name_span.start(),
        )));
    }

    // Early Error for BindingIdentifier, because the strictness of the functions body is also
    // relevant for the function parameters.
    if body.strict() && contains(&params, ContainsSymbol::EvalOrArguments) {
        return Err(Error::lex(LexError::Syntax(
            "unexpected identifier 'eval' or 'arguments' in strict mode".into(),
            params_start_position,
        )));
    }

    // It is a Syntax Error if any element of the BoundNames of FormalParameters
    // also occurs in the LexicallyDeclaredNames of FunctionBody.
    // https://tc39.es/ecma262/#sec-function-definitions-static-semantics-early-errors
    name_in_lexically_declared_names(
        &bound_names(&params),
        &top_level_lexically_declared_names(&body),
        params_start_position,
    )?;

    // `super` is rejected here: these parsers handle free-standing
    // declarations, not methods or derived-class constructors.
    if contains(&body, ContainsSymbol::Super) || contains(&params, ContainsSymbol::Super) {
        return Err(Error::lex(LexError::Syntax(
            "invalid super usage".into(),
            params_start_position,
        )));
    }

    Ok((name, params, body))
}
|
||||
@@ -0,0 +1 @@
|
||||
|
||||
358
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/lexical.rs
vendored
Normal file
358
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/lexical.rs
vendored
Normal file
@@ -0,0 +1,358 @@
|
||||
//! Lexical declaration parsing.
|
||||
//!
|
||||
//! This parses `let` and `const` declarations.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-let-and-const-declarations
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
cursor::{Cursor, SemicolonResult},
|
||||
expression::Initializer,
|
||||
statement::{ArrayBindingPattern, BindingIdentifier, ObjectBindingPattern},
|
||||
AllowAwait, AllowIn, AllowYield, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use ast::operations::bound_names;
|
||||
use boa_ast::{self as ast, declaration::Variable, pattern::Pattern, Keyword, Punctuator};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use std::{convert::TryInto, io::Read};
|
||||
|
||||
/// Parses a lexical declaration.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-LexicalDeclaration
#[derive(Debug, Clone, Copy)]
pub(in crate::parser) struct LexicalDeclaration {
    // Whether the `in` operator may appear in initializers.
    allow_in: AllowIn,
    // Whether `yield` may be used as a bound identifier.
    allow_yield: AllowYield,
    // Whether `await` may be used as a bound identifier.
    allow_await: AllowAwait,
    // `true` when this declaration is the init of a loop head; the trailing
    // semicolon is then left for the loop parser to consume.
    loop_init: bool,
}
|
||||
|
||||
impl LexicalDeclaration {
|
||||
/// Creates a new `LexicalDeclaration` parser.
|
||||
pub(in crate::parser) fn new<I, Y, A>(
|
||||
allow_in: I,
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
loop_init: bool,
|
||||
) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
loop_init,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for LexicalDeclaration
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = ast::declaration::LexicalDeclaration;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("LexicalDeclaration", "Parsing");
|
||||
let tok = cursor.next(interner).or_abrupt()?;
|
||||
|
||||
let lexical_declaration = match tok.kind() {
|
||||
TokenKind::Keyword((Keyword::Const | Keyword::Let, true)) => {
|
||||
return Err(Error::general(
|
||||
"Keyword must not contain escaped characters",
|
||||
tok.span().start(),
|
||||
))
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Const, false)) => BindingList::new(
|
||||
self.allow_in,
|
||||
self.allow_yield,
|
||||
self.allow_await,
|
||||
true,
|
||||
self.loop_init,
|
||||
)
|
||||
.parse(cursor, interner)?,
|
||||
TokenKind::Keyword((Keyword::Let, false)) => BindingList::new(
|
||||
self.allow_in,
|
||||
self.allow_yield,
|
||||
self.allow_await,
|
||||
false,
|
||||
self.loop_init,
|
||||
)
|
||||
.parse(cursor, interner)?,
|
||||
_ => unreachable!("unknown token found: {:?}", tok),
|
||||
};
|
||||
|
||||
if !self.loop_init {
|
||||
cursor.expect_semicolon("lexical declaration", interner)?;
|
||||
}
|
||||
|
||||
Ok(lexical_declaration)
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a binding list.
///
/// It will return an error if a `const` declaration is being parsed and there is no
/// initializer.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-BindingList
#[derive(Debug, Clone, Copy)]
struct BindingList {
    // Whether the `in` operator may appear in initializers.
    allow_in: AllowIn,
    // Whether `yield` may be used as a bound identifier.
    allow_yield: AllowYield,
    // Whether `await` may be used as a bound identifier.
    allow_await: AllowAwait,
    // `true` for `const` lists, which require initializers (unless in a loop head).
    is_const: bool,
    // `true` when parsing the init of a loop head (relaxes terminator handling).
    loop_init: bool,
}
|
||||
|
||||
impl BindingList {
|
||||
/// Creates a new `BindingList` parser.
|
||||
fn new<I, Y, A>(
|
||||
allow_in: I,
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
is_const: bool,
|
||||
loop_init: bool,
|
||||
) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
is_const,
|
||||
loop_init,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for BindingList
where
    R: Read,
{
    type Output = ast::declaration::LexicalDeclaration;

    /// Parses a comma-separated list of lexical bindings, stopping at a
    /// semicolon/line terminator or (in a loop head) at `of`/`in`.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("BindingList", "Parsing");

        // Create vectors to store the variable declarations
        // Const and Let signatures are slightly different, Const needs definitions, Lets don't
        let mut decls = Vec::new();

        loop {
            let decl = LexicalBinding::new(self.allow_in, self.allow_yield, self.allow_await)
                .parse(cursor, interner)?;

            if self.is_const {
                let init_is_some = decl.init().is_some();

                // A `const` binding must have an initializer, except when it
                // is the head of a `for` loop (`for (const x of ...)`).
                if init_is_some || self.loop_init {
                    decls.push(decl);
                } else {
                    let next = cursor.next(interner).or_abrupt()?;
                    return Err(Error::general(
                        "Expected initializer for const declaration",
                        next.span().start(),
                    ));
                }
            } else {
                decls.push(decl);
            }

            // Decide whether the list continues, ends, or is malformed.
            match cursor.peek_semicolon(interner)? {
                SemicolonResult::Found(_) => break,
                // `of`/`in` written with escape sequences is rejected here.
                SemicolonResult::NotFound(tk)
                    if tk.kind() == &TokenKind::Keyword((Keyword::Of, true))
                        || tk.kind() == &TokenKind::Keyword((Keyword::In, true)) =>
                {
                    return Err(Error::general(
                        "Keyword must not contain escaped characters",
                        tk.span().start(),
                    ));
                }
                // An unescaped `of`/`in` means this list is a `for` head; stop here.
                SemicolonResult::NotFound(tk)
                    if tk.kind() == &TokenKind::Keyword((Keyword::Of, false))
                        || tk.kind() == &TokenKind::Keyword((Keyword::In, false)) =>
                {
                    break
                }
                SemicolonResult::NotFound(tk)
                    if tk.kind() == &TokenKind::Punctuator(Punctuator::Comma) =>
                {
                    // We discard the comma
                    cursor.advance(interner);
                }
                // Inside a loop head the terminator is handled by the loop parser.
                SemicolonResult::NotFound(_) if self.loop_init => break,
                SemicolonResult::NotFound(_) => {
                    let next = cursor.next(interner).or_abrupt()?;
                    return Err(Error::expected(
                        [";".to_owned(), "line terminator".to_owned()],
                        next.to_string(interner),
                        next.span(),
                        "lexical declaration binding list",
                    ));
                }
            }
        }

        // The loop above always pushes at least one binding before breaking.
        let decls = decls
            .try_into()
            .expect("`LexicalBinding` must return at least one variable");

        if self.is_const {
            Ok(ast::declaration::LexicalDeclaration::Const(decls))
        } else {
            Ok(ast::declaration::LexicalDeclaration::Let(decls))
        }
    }
}
|
||||
|
||||
/// Lexical binding parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-LexicalBinding
|
||||
struct LexicalBinding {
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl LexicalBinding {
|
||||
/// Creates a new `BindingList` parser.
|
||||
fn new<I, Y, A>(allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for LexicalBinding
where
    R: Read,
{
    type Output = Variable;

    /// Parses a single binding: an object pattern, an array pattern, or a
    /// plain identifier, each with an optional `= initializer`.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("LexicalBinding", "Parsing");

        let peek_token = cursor.peek(0, interner).or_abrupt()?;
        // Remember where the binding starts for error reporting.
        let position = peek_token.span().start();

        match peek_token.kind() {
            // `{ ... }` object destructuring pattern.
            TokenKind::Punctuator(Punctuator::OpenBlock) => {
                let bindings = ObjectBindingPattern::new(self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?;

                // Only parse an initializer when the next token is `=`.
                let init = if cursor
                    .peek(0, interner)?
                    .filter(|t| *t.kind() == TokenKind::Punctuator(Punctuator::Assign))
                    .is_some()
                {
                    Some(
                        Initializer::new(None, self.allow_in, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?,
                    )
                } else {
                    None
                };

                let declaration = Pattern::Object(bindings.into());

                // `let` can never be bound by a lexical declaration.
                if bound_names(&declaration).contains(&Sym::LET.into()) {
                    return Err(Error::lex(LexError::Syntax(
                        "'let' is disallowed as a lexically bound name".into(),
                        position,
                    )));
                }

                Ok(Variable::from_pattern(declaration, init))
            }
            // `[ ... ]` array destructuring pattern.
            TokenKind::Punctuator(Punctuator::OpenBracket) => {
                let bindings = ArrayBindingPattern::new(self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?;

                // Only parse an initializer when the next token is `=`.
                let init = if cursor
                    .peek(0, interner)?
                    .filter(|t| *t.kind() == TokenKind::Punctuator(Punctuator::Assign))
                    .is_some()
                {
                    Some(
                        Initializer::new(None, self.allow_in, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?,
                    )
                } else {
                    None
                };

                let declaration = Pattern::Array(bindings.into());

                // `let` can never be bound by a lexical declaration.
                if bound_names(&declaration).contains(&Sym::LET.into()) {
                    return Err(Error::lex(LexError::Syntax(
                        "'let' is disallowed as a lexically bound name".into(),
                        position,
                    )));
                }

                Ok(Variable::from_pattern(declaration, init))
            }
            // Plain identifier binding.
            _ => {
                let ident = BindingIdentifier::new(self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?;

                // `let` can never be bound by a lexical declaration.
                if ident == Sym::LET {
                    return Err(Error::lex(LexError::Syntax(
                        "'let' is disallowed as a lexically bound name".into(),
                        position,
                    )));
                }

                // Only parse an initializer when the next token is `=`; the
                // identifier is passed along for named function inference.
                let init = if cursor
                    .peek(0, interner)?
                    .filter(|t| *t.kind() == TokenKind::Punctuator(Punctuator::Assign))
                    .is_some()
                {
                    Some(
                        Initializer::new(
                            Some(ident),
                            self.allow_in,
                            self.allow_yield,
                            self.allow_await,
                        )
                        .parse(cursor, interner)?,
                    )
                } else {
                    None
                };
                Ok(Variable::from_identifier(ident, init))
            }
        }
    }
}
|
||||
77
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/mod.rs
vendored
Normal file
77
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/mod.rs
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
//! Declaration parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements#Declarations
|
||||
//! [spec]:https://tc39.es/ecma262/#sec-declarations-and-the-variable-statement
|
||||
|
||||
mod hoistable;
|
||||
mod lexical;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
pub(in crate::parser) use hoistable::class_decl::ClassTail;
|
||||
pub(in crate::parser) use hoistable::FunctionDeclaration;
|
||||
use hoistable::HoistableDeclaration;
|
||||
pub(in crate::parser) use lexical::LexicalDeclaration;
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser},
|
||||
};
|
||||
use boa_ast::{self as ast, Keyword};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Parses a declaration.
///
/// Dispatches to `HoistableDeclaration` (function/async/class) or
/// `LexicalDeclaration` (`let`/`const`) based on the next token.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-Declaration
#[derive(Debug, Clone, Copy)]
pub(super) struct Declaration {
    // Forwarded to the selected sub-parser.
    allow_yield: AllowYield,
    allow_await: AllowAwait,
}
|
||||
|
||||
impl Declaration {
|
||||
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for Declaration
where
    R: Read,
{
    type Output = ast::Declaration;

    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("Declaration", "Parsing");
        // Only peek; the sub-parsers consume the keyword themselves.
        let tok = cursor.peek(0, interner).or_abrupt()?;

        match tok.kind() {
            TokenKind::Keyword((Keyword::Function | Keyword::Async | Keyword::Class, _)) => {
                // `false`: a plain declaration is never in a `default` export position.
                HoistableDeclaration::new(self.allow_yield, self.allow_await, false)
                    .parse(cursor, interner)
            }
            TokenKind::Keyword((Keyword::Const | Keyword::Let, _)) => {
                // `true` allows the `in` operator in initializers; the final
                // `false` marks "not a loop init", so a semicolon is required.
                LexicalDeclaration::new(true, self.allow_yield, self.allow_await, false)
                    .parse(cursor, interner)
                    .map(Into::into)
            }
            // Statement parsing only dispatches here for the keywords above.
            _ => unreachable!("unknown token found: {:?}", tok),
        }
    }
}
|
||||
365
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/tests.rs
vendored
Normal file
365
javascript-engine/external/boa/boa_parser/src/parser/statement/declaration/tests.rs
vendored
Normal file
@@ -0,0 +1,365 @@
|
||||
use std::convert::TryInto;
|
||||
|
||||
use crate::parser::tests::{check_invalid, check_parser};
|
||||
use boa_ast::{
|
||||
declaration::{LexicalDeclaration, VarDeclaration, Variable},
|
||||
expression::literal::Literal,
|
||||
Declaration, Statement,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks `var` declaration parsing.
#[test]
fn var_declaration() {
    let interner = &mut Interner::default();
    let a = interner.get_or_intern_static("a", utf16!("a"));
    let declarations = vec![Variable::from_identifier(
        a.into(),
        Some(Literal::from(5).into()),
    )]
    .try_into()
    .unwrap();
    check_parser(
        "var a = 5;",
        vec![Statement::Var(VarDeclaration(declarations)).into()],
        interner,
    );
}
|
||||
|
||||
/// `yield` and `await` are valid `var` binding names in non-strict scripts.
#[test]
fn var_declaration_keywords() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner
            .get_or_intern_static("yield", utf16!("yield"))
            .into(),
        Some(Literal::from(5).into()),
    );
    let decl = Statement::Var(VarDeclaration(vec![var].try_into().unwrap()));
    check_parser("var yield = 5;", vec![decl.into()], interner);

    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner
            .get_or_intern_static("await", utf16!("await"))
            .into(),
        Some(Literal::from(5).into()),
    );
    let decl = Statement::Var(VarDeclaration(vec![var].try_into().unwrap()));
    check_parser("var await = 5;", vec![decl.into()], interner);
}
|
||||
|
||||
/// Whitespace around `=` is optional in a `var` declaration.
#[test]
fn var_declaration_no_spaces() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        Some(Literal::from(5).into()),
    );
    let decl = Statement::Var(VarDeclaration(vec![var].try_into().unwrap()));
    check_parser("var a=5;", vec![decl.into()], interner);
}
|
||||
|
||||
/// A `var` declaration without an initializer parses with `None` as its init.
#[test]
fn empty_var_declaration() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        None,
    );
    let decl = Statement::Var(VarDeclaration(vec![var].try_into().unwrap()));
    check_parser("var a;", vec![decl.into()], interner);
}
|
||||
|
||||
/// Comma-separated `var` bindings — with and without initializers — share one
/// `VarDeclaration` node.
#[test]
fn multiple_var_declaration() {
    let interner = &mut Interner::default();
    let a = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        Some(Literal::from(5).into()),
    );
    let b = Variable::from_identifier(
        interner.get_or_intern_static("b", utf16!("b")).into(),
        None,
    );
    let c = Variable::from_identifier(
        interner.get_or_intern_static("c", utf16!("c")).into(),
        Some(Literal::from(6).into()),
    );
    let decl = Statement::Var(VarDeclaration(vec![a, b, c].try_into().unwrap()));
    check_parser("var a = 5, b, c = 6;", vec![decl.into()], interner);
}
|
||||
|
||||
/// A `let` declaration with an initializer parses to a lexical declaration.
#[test]
fn let_declaration() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        Some(Literal::from(5).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Let(vec![var].try_into().unwrap()));
    check_parser("let a = 5;", vec![decl.into()], interner);
}
|
||||
|
||||
/// `yield` and `await` are valid `let` binding names in non-strict scripts.
#[test]
fn let_declaration_keywords() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner
            .get_or_intern_static("yield", utf16!("yield"))
            .into(),
        Some(Literal::from(5).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Let(vec![var].try_into().unwrap()));
    check_parser("let yield = 5;", vec![decl.into()], interner);

    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner
            .get_or_intern_static("await", utf16!("await"))
            .into(),
        Some(Literal::from(5).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Let(vec![var].try_into().unwrap()));
    check_parser("let await = 5;", vec![decl.into()], interner);
}
|
||||
|
||||
/// Whitespace around `=` is optional in a `let` declaration.
#[test]
fn let_declaration_no_spaces() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        Some(Literal::from(5).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Let(vec![var].try_into().unwrap()));
    check_parser("let a=5;", vec![decl.into()], interner);
}
|
||||
|
||||
/// A `let` declaration without an initializer is valid (unlike `const`).
#[test]
fn empty_let_declaration() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        None,
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Let(vec![var].try_into().unwrap()));
    check_parser("let a;", vec![decl.into()], interner);
}
|
||||
|
||||
/// Comma-separated `let` bindings share one lexical-declaration node.
#[test]
fn multiple_let_declaration() {
    let interner = &mut Interner::default();
    let a = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        Some(Literal::from(5).into()),
    );
    let b = Variable::from_identifier(
        interner.get_or_intern_static("b", utf16!("b")).into(),
        None,
    );
    let c = Variable::from_identifier(
        interner.get_or_intern_static("c", utf16!("c")).into(),
        Some(Literal::from(6).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Let(vec![a, b, c].try_into().unwrap()));
    check_parser("let a = 5, b, c = 6;", vec![decl.into()], interner);
}
|
||||
|
||||
/// A `const` declaration with an initializer parses to a lexical declaration.
#[test]
fn const_declaration() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        Some(Literal::from(5).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Const(vec![var].try_into().unwrap()));
    check_parser("const a = 5;", vec![decl.into()], interner);
}
|
||||
|
||||
/// `yield` and `await` are valid `const` binding names in non-strict scripts.
#[test]
fn const_declaration_keywords() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner
            .get_or_intern_static("yield", utf16!("yield"))
            .into(),
        Some(Literal::from(5).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Const(vec![var].try_into().unwrap()));
    check_parser("const yield = 5;", vec![decl.into()], interner);

    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner
            .get_or_intern_static("await", utf16!("await"))
            .into(),
        Some(Literal::from(5).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Const(vec![var].try_into().unwrap()));
    check_parser("const await = 5;", vec![decl.into()], interner);
}
|
||||
|
||||
/// Whitespace around `=` is optional in a `const` declaration.
#[test]
fn const_declaration_no_spaces() {
    let interner = &mut Interner::default();
    let var = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        Some(Literal::from(5).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Const(vec![var].try_into().unwrap()));
    check_parser("const a=5;", vec![decl.into()], interner);
}
|
||||
|
||||
/// Checks empty `const` declaration parsing.
#[test]
fn empty_const_declaration() {
    // A `const` binding requires an initializer, so this must be a parse error.
    check_invalid("const a;");
}
|
||||
|
||||
/// Comma-separated `const` bindings share one lexical-declaration node.
#[test]
fn multiple_const_declaration() {
    let interner = &mut Interner::default();
    let a = Variable::from_identifier(
        interner.get_or_intern_static("a", utf16!("a")).into(),
        Some(Literal::from(5).into()),
    );
    let c = Variable::from_identifier(
        interner.get_or_intern_static("c", utf16!("c")).into(),
        Some(Literal::from(6).into()),
    );
    let decl = Declaration::Lexical(LexicalDeclaration::Const(vec![a, c].try_into().unwrap()));
    check_parser("const a = 5, c = 6;", vec![decl.into()], interner);
}
|
||||
102
javascript-engine/external/boa/boa_parser/src/parser/statement/expression/mod.rs
vendored
Normal file
102
javascript-engine/external/boa/boa_parser/src/parser/statement/expression/mod.rs
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::Expression, AllowAwait, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{Keyword, Punctuator, Statement};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Expression statement parsing.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-ExpressionStatement
#[derive(Debug, Clone, Copy)]
pub(in crate::parser::statement) struct ExpressionStatement {
    /// Whether `yield` expressions are allowed in this parsing context.
    allow_yield: AllowYield,
    /// Whether `await` expressions are allowed in this parsing context.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl ExpressionStatement {
|
||||
/// Creates a new `ExpressionStatement` parser.
|
||||
pub(in crate::parser::statement) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ExpressionStatement
where
    R: Read,
{
    type Output = Statement;

    /// Parses an expression statement, rejecting the tokens the grammar
    /// forbids at the start of this production (`function`, `class`,
    /// `async function`, `let [`) before delegating to the expression parser.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ExpressionStatement", "Parsing");

        let next_token = cursor.peek(0, interner).or_abrupt()?;
        match next_token.kind() {
            // The boolean in the keyword token marks whether it was written
            // with escape sequences; escaped keywords are always rejected here.
            TokenKind::Keyword((
                Keyword::Function | Keyword::Class | Keyword::Async | Keyword::Let,
                true,
            )) => {
                return Err(Error::general(
                    "Keyword must not contain escaped characters",
                    next_token.span().start(),
                ));
            }
            // `function`/`class` must be parsed as declarations, never as the
            // start of an expression statement.
            TokenKind::Keyword((Keyword::Function | Keyword::Class, false)) => {
                return Err(Error::general(
                    "expected statement",
                    next_token.span().start(),
                ));
            }
            // `async` is only forbidden when it starts `async function ...`;
            // a bare `async` identifier is a valid expression.
            TokenKind::Keyword((Keyword::Async, false)) => {
                let next_token = cursor.peek(1, interner).or_abrupt()?;
                match next_token.kind() {
                    TokenKind::Keyword((Keyword::Function, true)) => {
                        return Err(Error::general(
                            "Keyword must not contain escaped characters",
                            next_token.span().start(),
                        ));
                    }
                    TokenKind::Keyword((Keyword::Function, false)) => {
                        return Err(Error::general(
                            "expected statement",
                            next_token.span().start(),
                        ));
                    }
                    _ => {}
                }
            }
            // `let [` would be ambiguous with a lexical destructuring
            // declaration, so it is excluded from expression statements.
            TokenKind::Keyword((Keyword::Let, false)) => {
                let next_token = cursor.peek(1, interner).or_abrupt()?;
                if next_token.kind() == &TokenKind::Punctuator(Punctuator::OpenBracket) {
                    return Err(Error::general(
                        "expected statement",
                        next_token.span().start(),
                    ));
                }
            }
            _ => {}
        }

        let expr = Expression::new(None, true, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;

        // The statement must be terminated by `;` or an ASI opportunity.
        cursor.expect_semicolon("expression statement", interner)?;

        Ok(expr.into())
    }
}
|
||||
158
javascript-engine/external/boa/boa_parser/src/parser/statement/if_stm/mod.rs
vendored
Normal file
158
javascript-engine/external/boa/boa_parser/src/parser/statement/if_stm/mod.rs
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::Expression,
|
||||
statement::{declaration::FunctionDeclaration, Statement},
|
||||
AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
statement::{Block, If},
|
||||
Declaration, Keyword, Punctuator, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// If statement parsing.
///
/// An `if` statement will have a condition, a block statement, and an optional `else` statement.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/if...else
/// [spec]: https://tc39.es/ecma262/#prod-IfStatement
#[derive(Debug, Clone, Copy)]
pub(super) struct IfStatement {
    /// Whether `yield` expressions are allowed in this parsing context.
    allow_yield: AllowYield,
    /// Whether `await` expressions are allowed in this parsing context.
    allow_await: AllowAwait,
    /// Whether `return` statements are allowed in the branch bodies.
    allow_return: AllowReturn,
}
|
||||
|
||||
impl IfStatement {
|
||||
/// Creates a new `IfStatement` parser.
|
||||
pub(super) fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for IfStatement
where
    R: Read,
{
    type Output = If;

    /// Parses `if (cond) then [else alt]`, applying the Annex B rule for bare
    /// function declarations in branch positions and the labelled-function
    /// early errors.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("IfStatement", "Parsing");

        cursor.expect((Keyword::If, false), "if statement", interner)?;
        cursor.expect(Punctuator::OpenParen, "if statement", interner)?;

        let condition = Expression::new(None, true, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;

        // Position just after `)`, used for error reporting on the branch body.
        let position = cursor
            .expect(Punctuator::CloseParen, "if statement", interner)?
            .span()
            .end();

        let strict = cursor.strict_mode();
        let token = cursor.peek(0, interner).or_abrupt()?;
        let then_node = match token.kind() {
            TokenKind::Keyword((Keyword::Function, _)) => {
                // FunctionDeclarations in IfStatement Statement Clauses
                // https://tc39.es/ecma262/#sec-functiondeclarations-in-ifstatement-statement-clauses
                if strict {
                    // This production only applies when parsing non-strict code.
                    return Err(Error::wrong_function_declaration_non_strict(position));
                }
                // Source text matched by this production is processed as if each matching
                // occurrence of FunctionDeclaration[?Yield, ?Await, ~Default] was the sole
                // StatementListItem of a BlockStatement occupying that position in the source text.
                Block::from(vec![StatementListItem::Declaration(Declaration::Function(
                    FunctionDeclaration::new(self.allow_yield, self.allow_await, false)
                        .parse(cursor, interner)?,
                ))])
                .into()
            }
            _ => Statement::new(self.allow_yield, self.allow_await, self.allow_return)
                .parse(cursor, interner)?,
        };

        // Early Error: It is a Syntax Error if IsLabelledFunction(the first Statement) is true.
        if then_node.is_labelled_function() {
            return Err(Error::wrong_labelled_function_declaration(position));
        }

        // An `else` branch is optional; `peek` returning `None` (end of input)
        // simply means there is no alternate.
        let else_stmt = if let Some(token) = cursor.peek(0, interner)? {
            match token.kind() {
                TokenKind::Keyword((Keyword::Else, true)) => {
                    return Err(Error::general(
                        "Keyword must not contain escaped characters",
                        token.span().start(),
                    ));
                }
                TokenKind::Keyword((Keyword::Else, false)) => {
                    cursor.advance(interner);

                    let strict = cursor.strict_mode();
                    let token = cursor.peek(0, interner).or_abrupt()?;
                    let position = token.span().start();
                    let stmt = match token.kind() {
                        TokenKind::Keyword((Keyword::Function, _)) => {
                            // FunctionDeclarations in IfStatement Statement Clauses
                            // https://tc39.es/ecma262/#sec-functiondeclarations-in-ifstatement-statement-clauses
                            if strict {
                                return Err(Error::wrong_function_declaration_non_strict(position));
                            }

                            // Source text matched by this production is processed as if each matching
                            // occurrence of FunctionDeclaration[?Yield, ?Await, ~Default] was the sole
                            // StatementListItem of a BlockStatement occupying that position in the source text.
                            Block::from(vec![StatementListItem::Declaration(
                                Declaration::Function(
                                    FunctionDeclaration::new(
                                        self.allow_yield,
                                        self.allow_await,
                                        false,
                                    )
                                    .parse(cursor, interner)?,
                                ),
                            )])
                            .into()
                        }
                        _ => Statement::new(self.allow_yield, self.allow_await, self.allow_return)
                            .parse(cursor, interner)?,
                    };

                    // Early Error: It is a Syntax Error if IsLabelledFunction(the second Statement) is true.
                    if stmt.is_labelled_function() {
                        return Err(Error::wrong_labelled_function_declaration(position));
                    }

                    Some(stmt)
                }
                _ => None,
            }
        } else {
            None
        };

        Ok(If::new(condition, then_node, else_stmt))
    }
}
|
||||
35
javascript-engine/external/boa/boa_parser/src/parser/statement/if_stm/tests.rs
vendored
Normal file
35
javascript-engine/external/boa/boa_parser/src/parser/statement/if_stm/tests.rs
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{
|
||||
expression::literal::Literal,
|
||||
statement::{Block, If},
|
||||
Statement,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
|
||||
/// An `if` with an empty block and no `else` parses to a bare `If` node.
#[test]
fn if_without_else_block() {
    let cond = Literal::from(true).into();
    let then_branch = Block::from(Vec::new()).into();
    let stmt = Statement::If(If::new(cond, then_branch, None));
    check_parser("if (true) {}", vec![stmt.into()], &mut Interner::default());
}
|
||||
|
||||
/// A trailing newline after the block must not change the parsed result.
#[test]
fn if_without_else_block_with_trailing_newline() {
    let cond = Literal::from(true).into();
    let then_branch = Block::from(Vec::new()).into();
    let stmt = Statement::If(If::new(cond, then_branch, None));
    check_parser("if (true) {}\n", vec![stmt.into()], &mut Interner::default());
}
|
||||
118
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/do_while_statement.rs
vendored
Normal file
118
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/do_while_statement.rs
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
//! Do-while statement parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/do...while
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-do-while-statement
|
||||
|
||||
use crate::{
|
||||
lexer::{Token, TokenKind},
|
||||
parser::{
|
||||
expression::Expression, statement::Statement, AllowAwait, AllowReturn, AllowYield, Cursor,
|
||||
OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{statement::DoWhileLoop, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Do...while statement parsing
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/do...while
/// [spec]: https://tc39.es/ecma262/#sec-do-while-statement
#[derive(Debug, Clone, Copy)]
pub(in crate::parser::statement) struct DoWhileStatement {
    /// Whether `yield` expressions are allowed in this parsing context.
    allow_yield: AllowYield,
    /// Whether `await` expressions are allowed in this parsing context.
    allow_await: AllowAwait,
    /// Whether `return` statements are allowed in the loop body.
    allow_return: AllowReturn,
}
|
||||
|
||||
impl DoWhileStatement {
|
||||
/// Creates a new `DoWhileStatement` parser.
|
||||
pub(in crate::parser::statement) fn new<Y, A, R>(
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
allow_return: R,
|
||||
) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for DoWhileStatement
where
    R: Read,
{
    type Output = DoWhileLoop;

    /// Parses `do body while (cond)`, with automatic semicolon insertion after
    /// the closing parenthesis.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("DoWhileStatement", "Parsing");

        cursor.expect((Keyword::Do, false), "do while statement", interner)?;

        // Remember where the body starts for the labelled-function early error.
        let position = cursor.peek(0, interner).or_abrupt()?.span().start();

        let body = Statement::new(self.allow_yield, self.allow_await, self.allow_return)
            .parse(cursor, interner)?;

        // Early Error: It is a Syntax Error if IsLabelledFunction(Statement) is true.
        if body.is_labelled_function() {
            return Err(Error::wrong_labelled_function_declaration(position));
        }

        // Peek first so an escaped `while` keyword produces a specific error
        // before the generic `expect` below consumes it.
        let next_token = cursor.peek(0, interner).or_abrupt()?;
        match next_token.kind() {
            TokenKind::Keyword((Keyword::While, true)) => {
                return Err(Error::general(
                    "Keyword must not contain escaped characters",
                    next_token.span().start(),
                ));
            }
            TokenKind::Keyword((Keyword::While, false)) => {}
            _ => {
                return Err(Error::expected(
                    ["while".to_owned()],
                    next_token.to_string(interner),
                    next_token.span(),
                    "do while statement",
                ));
            }
        }

        cursor.expect((Keyword::While, false), "do while statement", interner)?;

        cursor.expect(Punctuator::OpenParen, "do while statement", interner)?;

        let cond = Expression::new(None, true, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;

        cursor.expect(Punctuator::CloseParen, "do while statement", interner)?;

        // Here, we only care to read the next token if it's a semicolon. If it's not, we
        // automatically "enter" or assume a semicolon, since we have just read the `)` token:
        // https://tc39.es/ecma262/#sec-automatic-semicolon-insertion
        if cursor.peek(0, interner)?.map(Token::kind)
            == Some(&TokenKind::Punctuator(Punctuator::Semicolon))
        {
            cursor.advance(interner);
        }

        Ok(DoWhileLoop::new(body, cond))
    }
}
|
||||
366
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/for_statement.rs
vendored
Normal file
366
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/for_statement.rs
vendored
Normal file
@@ -0,0 +1,366 @@
|
||||
//! For statement parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-for-statement
|
||||
|
||||
use crate::{
|
||||
lexer::{Error as LexError, TokenKind},
|
||||
parser::{
|
||||
expression::Expression,
|
||||
statement::declaration::LexicalDeclaration,
|
||||
statement::{variable::VariableDeclarationList, Statement},
|
||||
AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use ast::operations::{bound_names, var_declared_names};
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
statement::{
|
||||
iteration::{ForLoopInitializer, IterableLoopInitializer},
|
||||
ForInLoop, ForLoop, ForOfLoop,
|
||||
},
|
||||
Keyword, Position, Punctuator,
|
||||
};
|
||||
use boa_interner::{Interner, Sym};
|
||||
use boa_profiler::Profiler;
|
||||
use rustc_hash::FxHashSet;
|
||||
use std::io::Read;
|
||||
|
||||
/// For statement parsing
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for
/// [spec]: https://tc39.es/ecma262/#sec-for-statement
#[derive(Debug, Clone, Copy)]
pub(in crate::parser::statement) struct ForStatement {
    /// Whether `yield` expressions are allowed in this parsing context.
    allow_yield: AllowYield,
    /// Whether `await` expressions are allowed in this parsing context.
    allow_await: AllowAwait,
    /// Whether `return` statements are allowed in the loop body.
    allow_return: AllowReturn,
}
|
||||
|
||||
impl ForStatement {
|
||||
/// Creates a new `ForStatement` parser.
|
||||
pub(in crate::parser::statement) fn new<Y, A, R>(
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
allow_return: R,
|
||||
) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ForStatement
where
    R: Read,
{
    type Output = ast::Statement;

    /// Parses the three `for` variants (`for(;;)`, `for-in`, `for-of` /
    /// `for await-of`), disambiguating them by the token that follows the
    /// head's initializer, and applies the spec's early errors for lexical
    /// bindings in the head.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ForStatement", "Parsing");
        cursor.expect((Keyword::For, false), "for statement", interner)?;

        // Set to `true` when the head is `for await (...)`.
        let mut r#await = false;

        let next = cursor.next(interner).or_abrupt()?;
        let init_position = match next.kind() {
            TokenKind::Punctuator(Punctuator::OpenParen) => next.span().end(),
            // `for await` is only legal when the context allows `await`.
            TokenKind::Keyword((Keyword::Await, _)) if !self.allow_await.0 => {
                return Err(Error::unexpected(
                    next.to_string(interner),
                    next.span(),
                    "for await...of is only valid in async functions or async generators",
                ));
            }
            TokenKind::Keyword((Keyword::Await, _)) => {
                r#await = true;
                cursor
                    .expect(Punctuator::OpenParen, "for await...of", interner)?
                    .span()
                    .end()
            }
            _ => {
                return Err(Error::unexpected(
                    next.to_string(interner),
                    next.span(),
                    "for statement",
                ));
            }
        };

        // Parse the head's initializer: a `var`/lexical declaration, an
        // expression, or nothing (`for (;;)`).
        let init = match cursor.peek(0, interner).or_abrupt()?.kind() {
            TokenKind::Keyword((Keyword::Var, _)) => {
                cursor.advance(interner);
                Some(
                    VariableDeclarationList::new(false, self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?
                        .into(),
                )
            }
            TokenKind::Keyword((Keyword::Let | Keyword::Const, _)) => Some(
                LexicalDeclaration::new(false, self.allow_yield, self.allow_await, true)
                    .parse(cursor, interner)?
                    .into(),
            ),
            // A bare `async` identifier is a valid expression head, but
            // `async of` is explicitly forbidden as a for-of left-hand side.
            TokenKind::Keyword((Keyword::Async, false)) => {
                match cursor.peek(1, interner).or_abrupt()?.kind() {
                    TokenKind::Keyword((Keyword::Of, _)) => {
                        return Err(Error::lex(LexError::Syntax(
                            "invalid left-hand side expression 'async' of a for-of loop".into(),
                            init_position,
                        )));
                    }
                    _ => Some(
                        Expression::new(None, false, self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?
                            .into(),
                    ),
                }
            }
            TokenKind::Punctuator(Punctuator::Semicolon) => None,
            _ => Some(
                Expression::new(None, false, self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?
                    .into(),
            ),
        };

        // Disambiguate: an `in`/`of` keyword after the initializer turns the
        // statement into a for-in / for-of loop, which is fully parsed and
        // returned inside this match.
        let token = cursor.peek(0, interner).or_abrupt()?;
        let position = token.span().start();
        let init = match (init, token.kind()) {
            (Some(_), TokenKind::Keyword((Keyword::In | Keyword::Of, true))) => {
                return Err(Error::general(
                    "Keyword must not contain escaped characters",
                    position,
                ));
            }
            (Some(_), TokenKind::Keyword((Keyword::In, false))) if r#await => {
                return Err(Error::general(
                    "`await` can only be used in a `for await .. of` loop",
                    position,
                ));
            }
            (Some(init), TokenKind::Keyword((kw @ (Keyword::In | Keyword::Of), false))) => {
                let kw = *kw;
                let init =
                    initializer_to_iterable_loop_initializer(init, position, cursor.strict_mode())?;

                cursor.advance(interner);
                let expr = Expression::new(None, true, self.allow_yield, self.allow_await)
                    .parse(cursor, interner)?;

                cursor.expect(Punctuator::CloseParen, "for in/of statement", interner)?;

                let position = cursor.peek(0, interner).or_abrupt()?.span().start();

                let body = Statement::new(self.allow_yield, self.allow_await, self.allow_return)
                    .parse(cursor, interner)?;

                // Early Error: It is a Syntax Error if IsLabelledFunction(Statement) is true.
                if body.is_labelled_function() {
                    return Err(Error::wrong_labelled_function_declaration(position));
                }

                // Checks are only applicable to lexical bindings.
                if matches!(
                    &init,
                    IterableLoopInitializer::Const(_) | IterableLoopInitializer::Let(_)
                ) {
                    // It is a Syntax Error if the BoundNames of ForDeclaration contains "let".
                    // It is a Syntax Error if any element of the BoundNames of ForDeclaration also occurs in the VarDeclaredNames of Statement.
                    // It is a Syntax Error if the BoundNames of ForDeclaration contains any duplicate entries.
                    let vars = var_declared_names(&body);
                    let mut names = FxHashSet::default();
                    for name in bound_names(&init) {
                        if name == Sym::LET {
                            return Err(Error::general(
                                "Cannot use 'let' as a lexically bound name",
                                position,
                            ));
                        }
                        if vars.contains(&name) {
                            return Err(Error::general(
                                "For loop initializer declared in loop body",
                                position,
                            ));
                        }
                        if !names.insert(name) {
                            return Err(Error::general(
                                "For loop initializer cannot contain duplicate identifiers",
                                position,
                            ));
                        }
                    }
                }
                return Ok(if kw == Keyword::In {
                    ForInLoop::new(init, expr, body).into()
                } else {
                    ForOfLoop::new(init, expr, body, r#await).into()
                });
            }
            (init, _) => init,
        };

        // Classic `for(;;)` head: every `const` binding must have an initializer.
        if let Some(ForLoopInitializer::Lexical(ast::declaration::LexicalDeclaration::Const(
            ref list,
        ))) = init
        {
            for decl in list.as_ref() {
                if decl.init().is_none() {
                    return Err(Error::general(
                        "Expected initializer for const declaration",
                        position,
                    ));
                }
            }
        }

        cursor.expect(Punctuator::Semicolon, "for statement", interner)?;

        // Optional loop condition, terminated by `;`.
        let cond = if cursor.next_if(Punctuator::Semicolon, interner)?.is_some() {
            None
        } else {
            let step = Expression::new(None, true, self.allow_yield, self.allow_await)
                .parse(cursor, interner)?;
            cursor.expect(Punctuator::Semicolon, "for statement", interner)?;
            Some(step)
        };

        // Optional update expression, terminated by `)`.
        let step = if cursor.next_if(Punctuator::CloseParen, interner)?.is_some() {
            None
        } else {
            let step = Expression::new(None, true, self.allow_yield, self.allow_await)
                .parse(cursor, interner)?;
            cursor.expect(
                TokenKind::Punctuator(Punctuator::CloseParen),
                "for statement",
                interner,
            )?;
            Some(step)
        };

        let position = cursor.peek(0, interner).or_abrupt()?.span().start();

        let body = Statement::new(self.allow_yield, self.allow_await, self.allow_return)
            .parse(cursor, interner)?;

        // Early Error: It is a Syntax Error if IsLabelledFunction(Statement) is true.
        if body.is_labelled_function() {
            return Err(Error::wrong_labelled_function_declaration(position));
        }

        // Early Error: It is a Syntax Error if any element of the BoundNames of
        // LexicalDeclaration also occurs in the VarDeclaredNames of Statement.
        // Note: only applies to lexical bindings.
        if let Some(ForLoopInitializer::Lexical(ref decl)) = init {
            let vars = var_declared_names(&body);
            for name in bound_names(decl) {
                if vars.contains(&name) {
                    return Err(Error::general(
                        "For loop initializer declared in loop body",
                        position,
                    ));
                }
            }
        }

        Ok(ForLoop::new(init, cond, step, body).into())
    }
}
|
||||
|
||||
fn initializer_to_iterable_loop_initializer(
|
||||
initializer: ForLoopInitializer,
|
||||
position: Position,
|
||||
strict: bool,
|
||||
) -> ParseResult<IterableLoopInitializer> {
|
||||
match initializer {
|
||||
ForLoopInitializer::Expression(expr) => match expr {
|
||||
ast::Expression::Identifier(ident)
|
||||
if strict && [Sym::EVAL, Sym::ARGUMENTS].contains(&ident.sym()) =>
|
||||
{
|
||||
Err(Error::lex(LexError::Syntax(
|
||||
"cannot use `eval` or `arguments` as iterable loop variable in strict code"
|
||||
.into(),
|
||||
position,
|
||||
)))
|
||||
}
|
||||
ast::Expression::Identifier(ident) => Ok(IterableLoopInitializer::Identifier(ident)),
|
||||
ast::Expression::ArrayLiteral(array) => array
|
||||
.to_pattern(strict)
|
||||
.ok_or(Error::General {
|
||||
message: "invalid array destructuring pattern in iterable loop initializer",
|
||||
position,
|
||||
})
|
||||
.map(|arr| IterableLoopInitializer::Pattern(arr.into())),
|
||||
ast::Expression::ObjectLiteral(object) => object
|
||||
.to_pattern(strict)
|
||||
.ok_or(Error::General {
|
||||
message: "invalid object destructuring pattern in iterable loop initializer",
|
||||
position,
|
||||
})
|
||||
.map(|obj| IterableLoopInitializer::Pattern(obj.into())),
|
||||
ast::Expression::PropertyAccess(access) => Ok(IterableLoopInitializer::Access(access)),
|
||||
_ => Err(Error::lex(LexError::Syntax(
|
||||
"invalid variable for iterable loop".into(),
|
||||
position,
|
||||
))),
|
||||
},
|
||||
ForLoopInitializer::Lexical(decl) => match decl.variable_list().as_ref() {
|
||||
[declaration] => {
|
||||
if declaration.init().is_some() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"a declaration in the head of a for-of loop can't have an initializer"
|
||||
.into(),
|
||||
position,
|
||||
)));
|
||||
}
|
||||
Ok(match decl {
|
||||
ast::declaration::LexicalDeclaration::Const(_) => {
|
||||
IterableLoopInitializer::Const(declaration.binding().clone())
|
||||
}
|
||||
ast::declaration::LexicalDeclaration::Let(_) => {
|
||||
IterableLoopInitializer::Let(declaration.binding().clone())
|
||||
}
|
||||
})
|
||||
}
|
||||
_ => Err(Error::lex(LexError::Syntax(
|
||||
"only one variable can be declared in the head of a for-of loop".into(),
|
||||
position,
|
||||
))),
|
||||
},
|
||||
ForLoopInitializer::Var(decl) => match decl.0.as_ref() {
|
||||
[declaration] => {
|
||||
// TODO: implement initializers in ForIn heads
|
||||
// https://tc39.es/ecma262/#sec-initializers-in-forin-statement-heads
|
||||
if declaration.init().is_some() {
|
||||
return Err(Error::lex(LexError::Syntax(
|
||||
"a declaration in the head of a for-of loop can't have an initializer"
|
||||
.into(),
|
||||
position,
|
||||
)));
|
||||
}
|
||||
Ok(IterableLoopInitializer::Var(declaration.binding().clone()))
|
||||
}
|
||||
_ => Err(Error::lex(LexError::Syntax(
|
||||
"only one variable can be declared in the head of a for-of loop".into(),
|
||||
position,
|
||||
))),
|
||||
},
|
||||
}
|
||||
}
|
||||
10
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/mod.rs
vendored
Normal file
10
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/mod.rs
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
mod do_while_statement;
|
||||
mod for_statement;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
mod while_statement;
|
||||
|
||||
pub(super) use self::{
|
||||
do_while_statement::DoWhileStatement, for_statement::ForStatement,
|
||||
while_statement::WhileStatement,
|
||||
};
|
||||
252
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/tests.rs
vendored
Normal file
252
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/tests.rs
vendored
Normal file
@@ -0,0 +1,252 @@
|
||||
use crate::parser::tests::{check_invalid, check_parser};
|
||||
use boa_ast::{
|
||||
declaration::{VarDeclaration, Variable},
|
||||
expression::{
|
||||
access::SimplePropertyAccess,
|
||||
literal::Literal,
|
||||
operator::{assign::AssignOp, binary::RelationalOp, unary::UnaryOp, Assign, Binary, Unary},
|
||||
Call, Identifier,
|
||||
},
|
||||
statement::{Block, Break, DoWhileLoop, WhileLoop},
|
||||
Expression, Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks do-while statement parsing.
#[test]
fn check_do_while() {
    let interner = &mut Interner::default();
    check_parser(
        r#"do {
            a += 1;
        } while (true)"#,
        vec![Statement::DoWhileLoop(DoWhileLoop::new(
            // Body: `{ a += 1; }`
            Statement::Block(
                vec![StatementListItem::Statement(Statement::Expression(
                    Expression::from(Assign::new(
                        AssignOp::Add,
                        Identifier::new(interner.get_or_intern_static("a", utf16!("a"))).into(),
                        Literal::from(1).into(),
                    )),
                ))]
                .into(),
            ),
            // Condition: `true`
            Literal::from(true).into(),
        ))
        .into()],
        interner,
    );
}
|
||||
|
||||
// Checks automatic semicolon insertion after do-while.
#[test]
fn check_do_while_semicolon_insertion() {
    let interner = &mut Interner::default();
    check_parser(
        r#"var i = 0;
        do {console.log("hello");} while(i++ < 10) console.log("end");"#,
        vec![
            // `var i = 0;`
            Statement::Var(VarDeclaration(
                vec![Variable::from_identifier(
                    interner.get_or_intern_static("i", utf16!("i")).into(),
                    Some(Literal::from(0).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            // `do {console.log("hello");} while(i++ < 10)` — the missing `;` is
            // inserted automatically before the following statement.
            Statement::DoWhileLoop(DoWhileLoop::new(
                Statement::Block(
                    vec![StatementListItem::Statement(Statement::Expression(
                        Expression::from(Call::new(
                            Expression::PropertyAccess(
                                SimplePropertyAccess::new(
                                    Identifier::new(
                                        interner.get_or_intern_static("console", utf16!("console")),
                                    )
                                    .into(),
                                    interner.get_or_intern_static("log", utf16!("log")),
                                )
                                .into(),
                            ),
                            vec![Literal::from(
                                interner.get_or_intern_static("hello", utf16!("hello")),
                            )
                            .into()]
                            .into(),
                        )),
                    ))]
                    .into(),
                ),
                Binary::new(
                    RelationalOp::LessThan.into(),
                    Unary::new(
                        UnaryOp::IncrementPost,
                        Identifier::new(interner.get_or_intern_static("i", utf16!("i"))).into(),
                    )
                    .into(),
                    Literal::from(10).into(),
                )
                .into(),
            ))
            .into(),
            // Trailing `console.log("end");`
            Statement::Expression(Expression::from(Call::new(
                Expression::PropertyAccess(
                    SimplePropertyAccess::new(
                        Identifier::new(
                            interner.get_or_intern_static("console", utf16!("console")),
                        )
                        .into(),
                        interner.get_or_intern_static("log", utf16!("log")),
                    )
                    .into(),
                ),
                vec![Literal::from(interner.get_or_intern_static("end", utf16!("end"))).into()]
                    .into(),
            )))
            .into(),
        ],
        interner,
    );
}
|
||||
|
||||
// Checks automatic semicolon insertion after do-while with no space between closing paren
// and next statement.
#[test]
fn check_do_while_semicolon_insertion_no_space() {
    let interner = &mut Interner::default();
    check_parser(
        r#"var i = 0;
        do {console.log("hello");} while(i++ < 10)console.log("end");"#,
        vec![
            // `var i = 0;`
            Statement::Var(VarDeclaration(
                vec![Variable::from_identifier(
                    interner.get_or_intern_static("i", utf16!("i")).into(),
                    Some(Literal::from(0).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            // `do {console.log("hello");} while(i++ < 10)` — ASI still applies even
            // with no whitespace before the next statement.
            Statement::DoWhileLoop(DoWhileLoop::new(
                Statement::Block(
                    vec![StatementListItem::Statement(Statement::Expression(
                        Expression::from(Call::new(
                            Expression::PropertyAccess(
                                SimplePropertyAccess::new(
                                    Identifier::new(
                                        interner.get_or_intern_static("console", utf16!("console")),
                                    )
                                    .into(),
                                    interner.get_or_intern_static("log", utf16!("log")),
                                )
                                .into(),
                            ),
                            vec![Literal::from(
                                interner.get_or_intern_static("hello", utf16!("hello")),
                            )
                            .into()]
                            .into(),
                        )),
                    ))]
                    .into(),
                ),
                Binary::new(
                    RelationalOp::LessThan.into(),
                    Unary::new(
                        UnaryOp::IncrementPost,
                        Identifier::new(interner.get_or_intern_static("i", utf16!("i"))).into(),
                    )
                    .into(),
                    Literal::from(10).into(),
                )
                .into(),
            ))
            .into(),
            // Trailing `console.log("end");`
            Statement::Expression(Expression::from(Call::new(
                Expression::PropertyAccess(
                    SimplePropertyAccess::new(
                        Identifier::new(
                            interner.get_or_intern_static("console", utf16!("console")),
                        )
                        .into(),
                        interner.get_or_intern_static("log", utf16!("log")),
                    )
                    .into(),
                ),
                vec![Literal::from(interner.get_or_intern_static("end", utf16!("end"))).into()]
                    .into(),
            )))
            .into(),
        ],
        interner,
    );
}
|
||||
|
||||
/// Checks parsing of a while statement which is separated out with line terminators.
#[test]
fn while_spaces() {
    check_parser(
        r#"

        while

        (

        true

        )

        break;

        "#,
        vec![Statement::WhileLoop(WhileLoop::new(
            Literal::from(true).into(),
            Break::new(None).into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}
|
||||
|
||||
/// Checks parsing of a do-while statement which is separated out with line terminators.
#[test]
fn do_while_spaces() {
    check_parser(
        r#"

        do

        {

        break;

        }

        while (true)

        "#,
        vec![Statement::DoWhileLoop(DoWhileLoop::new(
            Block::from(vec![StatementListItem::Statement(Statement::Break(
                Break::new(None),
            ))])
            .into(),
            Literal::Bool(true).into(),
        ))
        .into()],
        &mut Interner::default(),
    );
}
|
||||
|
||||
/// Checks rejection of const bindings without an initializer in for loops.
#[test]
fn reject_const_no_init_for_loop() {
    check_invalid("for (const h;;);");
}
|
||||
|
||||
/// Checks rejection of `for await .. in` loops (`for await` only pairs with `of`).
#[test]
fn reject_for_await_in_loop() {
    check_invalid("for await (x in [1,2,3]);");
}
|
||||
77
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/while_statement.rs
vendored
Normal file
77
javascript-engine/external/boa/boa_parser/src/parser/statement/iteration/while_statement.rs
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
use crate::{
|
||||
parser::{
|
||||
expression::Expression, statement::Statement, AllowAwait, AllowReturn, AllowYield, Cursor,
|
||||
OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{statement::WhileLoop, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// While statement parsing
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/while
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-while-statement
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser::statement) struct WhileStatement {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
allow_return: AllowReturn,
|
||||
}
|
||||
|
||||
impl WhileStatement {
|
||||
/// Creates a new `WhileStatement` parser.
|
||||
pub(in crate::parser::statement) fn new<Y, A, R>(
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
allow_return: R,
|
||||
) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for WhileStatement
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = WhileLoop;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("WhileStatement", "Parsing");
|
||||
cursor.expect((Keyword::While, false), "while statement", interner)?;
|
||||
|
||||
cursor.expect(Punctuator::OpenParen, "while statement", interner)?;
|
||||
|
||||
let cond = Expression::new(None, true, self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
cursor.expect(Punctuator::CloseParen, "while statement", interner)?;
|
||||
|
||||
let position = cursor.peek(0, interner).or_abrupt()?.span().start();
|
||||
|
||||
let body = Statement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
// Early Error: It is a Syntax Error if IsLabelledFunction(the second Statement) is true.
|
||||
if body.is_labelled_function() {
|
||||
return Err(Error::wrong_labelled_function_declaration(position));
|
||||
}
|
||||
|
||||
Ok(WhileLoop::new(cond, body))
|
||||
}
|
||||
}
|
||||
84
javascript-engine/external/boa/boa_parser/src/parser/statement/labelled_stm/mod.rs
vendored
Normal file
84
javascript-engine/external/boa/boa_parser/src/parser/statement/labelled_stm/mod.rs
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
cursor::Cursor,
|
||||
expression::LabelIdentifier,
|
||||
statement::{declaration::FunctionDeclaration, AllowAwait, AllowReturn, Statement},
|
||||
AllowYield, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{self as ast, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Labelled Statement Parsing
|
||||
///
|
||||
/// More information
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/label
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-labelled-statements
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct LabelledStatement {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
allow_return: AllowReturn,
|
||||
}
|
||||
|
||||
impl LabelledStatement {
|
||||
pub(super) fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for LabelledStatement
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = ast::statement::Labelled;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("Label", "Parsing");
|
||||
|
||||
let label = LabelIdentifier::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?
|
||||
.sym();
|
||||
|
||||
cursor.expect(Punctuator::Colon, "Labelled Statement", interner)?;
|
||||
|
||||
let strict = cursor.strict_mode();
|
||||
let next_token = cursor.peek(0, interner).or_abrupt()?;
|
||||
|
||||
let labelled_item = match next_token.kind() {
|
||||
// Early Error: It is a Syntax Error if any strict mode source code matches this rule.
|
||||
// https://tc39.es/ecma262/#sec-labelled-statements-static-semantics-early-errors
|
||||
// https://tc39.es/ecma262/#sec-labelled-function-declarations
|
||||
TokenKind::Keyword((Keyword::Function, _)) if strict => {
|
||||
return Err(Error::general(
|
||||
"In strict mode code, functions can only be declared at top level or inside a block.",
|
||||
next_token.span().start()
|
||||
))
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Function, _)) => {
|
||||
FunctionDeclaration::new(self.allow_yield, self.allow_await, false)
|
||||
.parse(cursor, interner)?
|
||||
.into()
|
||||
}
|
||||
_ => Statement::new(self.allow_yield, self.allow_await, self.allow_return).parse(cursor, interner)?.into()
|
||||
};
|
||||
|
||||
Ok(ast::statement::Labelled::new(labelled_item, label))
|
||||
}
|
||||
}
|
||||
839
javascript-engine/external/boa/boa_parser/src/parser/statement/mod.rs
vendored
Normal file
839
javascript-engine/external/boa/boa_parser/src/parser/statement/mod.rs
vendored
Normal file
@@ -0,0 +1,839 @@
|
||||
//! Statement and declaration parsing.
|
||||
//!
|
||||
//! More information:
|
||||
//! - [MDN documentation][mdn]
|
||||
//! - [ECMAScript specification][spec]
|
||||
//!
|
||||
//! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements
|
||||
//! [spec]: https://tc39.es/ecma262/#sec-ecmascript-language-statements-and-declarations
|
||||
|
||||
mod block;
|
||||
mod break_stm;
|
||||
mod continue_stm;
|
||||
mod declaration;
|
||||
mod expression;
|
||||
mod if_stm;
|
||||
mod iteration;
|
||||
mod labelled_stm;
|
||||
mod return_stm;
|
||||
mod switch;
|
||||
mod throw;
|
||||
mod try_stm;
|
||||
mod variable;
|
||||
|
||||
use self::{
|
||||
block::BlockStatement,
|
||||
break_stm::BreakStatement,
|
||||
continue_stm::ContinueStatement,
|
||||
declaration::Declaration,
|
||||
expression::ExpressionStatement,
|
||||
if_stm::IfStatement,
|
||||
iteration::{DoWhileStatement, ForStatement, WhileStatement},
|
||||
labelled_stm::LabelledStatement,
|
||||
return_stm::ReturnStatement,
|
||||
switch::SwitchStatement,
|
||||
throw::ThrowStatement,
|
||||
try_stm::TryStatement,
|
||||
variable::VariableStatement,
|
||||
};
|
||||
use crate::{
|
||||
lexer::{Error as LexError, InputElement, Token, TokenKind},
|
||||
parser::{
|
||||
expression::{BindingIdentifier, Initializer, PropertyName},
|
||||
AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
self as ast,
|
||||
pattern::{ArrayPattern, ArrayPatternElement, ObjectPatternElement},
|
||||
Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
pub(in crate::parser) use declaration::ClassTail;
|
||||
|
||||
/// Statement parsing.
|
||||
///
|
||||
/// This can be one of the following:
|
||||
///
|
||||
/// - `BlockStatement`
|
||||
/// - `VariableStatement`
|
||||
/// - `EmptyStatement`
|
||||
/// - `ExpressionStatement`
|
||||
/// - `IfStatement`
|
||||
/// - `BreakableStatement`
|
||||
/// - `ContinueStatement`
|
||||
/// - `BreakStatement`
|
||||
/// - `ReturnStatement`
|
||||
/// - `WithStatement`
|
||||
/// - `LabelledStatement`
|
||||
/// - `ThrowStatement`
|
||||
/// - `SwitchStatement`
|
||||
/// - `TryStatement`
|
||||
/// - `DebuggerStatement`
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-Statement
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct Statement {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
allow_return: AllowReturn,
|
||||
}
|
||||
|
||||
impl Statement {
|
||||
/// Creates a new `Statement` parser.
|
||||
pub(super) fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for Statement
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = ast::Statement;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("Statement", "Parsing");
|
||||
// TODO: add BreakableStatement and divide Whiles, fors and so on to another place.
|
||||
let tok = cursor.peek(0, interner).or_abrupt()?;
|
||||
|
||||
match tok.kind() {
|
||||
TokenKind::Keyword((Keyword::If, _)) => {
|
||||
IfStatement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Var, _)) => {
|
||||
VariableStatement::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::While, _)) => {
|
||||
WhileStatement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Do, _)) => {
|
||||
DoWhileStatement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::For, _)) => {
|
||||
ForStatement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Return, _)) => {
|
||||
if self.allow_return.0 {
|
||||
ReturnStatement::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
} else {
|
||||
Err(Error::unexpected(
|
||||
tok.to_string(interner),
|
||||
tok.span(),
|
||||
"statement",
|
||||
))
|
||||
}
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Break, _)) => {
|
||||
BreakStatement::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Continue, _)) => {
|
||||
ContinueStatement::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Try, _)) => {
|
||||
TryStatement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Throw, _)) => {
|
||||
ThrowStatement::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Switch, _)) => {
|
||||
SwitchStatement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Punctuator(Punctuator::OpenBlock) => {
|
||||
BlockStatement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from)
|
||||
}
|
||||
TokenKind::Punctuator(Punctuator::Semicolon) => {
|
||||
// parse the EmptyStatement
|
||||
cursor.advance(interner);
|
||||
Ok(ast::Statement::Empty)
|
||||
}
|
||||
TokenKind::Identifier(_) => {
|
||||
// Labelled Statement check
|
||||
cursor.set_goal(InputElement::Div);
|
||||
let tok = cursor.peek(1, interner)?;
|
||||
|
||||
if let Some(tok) = tok {
|
||||
if matches!(tok.kind(), TokenKind::Punctuator(Punctuator::Colon)) {
|
||||
return LabelledStatement::new(
|
||||
self.allow_yield,
|
||||
self.allow_await,
|
||||
self.allow_return,
|
||||
)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::Statement::from);
|
||||
}
|
||||
}
|
||||
|
||||
ExpressionStatement::new(self.allow_yield, self.allow_await).parse(cursor, interner)
|
||||
}
|
||||
|
||||
_ => {
|
||||
ExpressionStatement::new(self.allow_yield, self.allow_await).parse(cursor, interner)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads a list of statements.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-StatementList
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct StatementList {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
allow_return: AllowReturn,
|
||||
break_nodes: &'static [TokenKind],
|
||||
directive_prologues: bool,
|
||||
strict: bool,
|
||||
}
|
||||
|
||||
impl StatementList {
|
||||
/// Creates a new `StatementList` parser.
|
||||
pub(super) fn new<Y, A, R>(
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
allow_return: R,
|
||||
break_nodes: &'static [TokenKind],
|
||||
directive_prologues: bool,
|
||||
strict: bool,
|
||||
) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
break_nodes,
|
||||
directive_prologues,
|
||||
strict,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for StatementList
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = ast::StatementList;
|
||||
|
||||
/// The function parses a `node::StatementList` using the `StatementList`'s
|
||||
/// `break_nodes` to know when to terminate.
|
||||
///
|
||||
/// Returns a `ParseError::AbruptEnd` if end of stream is reached before a
|
||||
/// break token.
|
||||
///
|
||||
/// Returns a `ParseError::unexpected` if an unexpected token is found.
|
||||
///
|
||||
/// Note that the last token which causes the parse to finish is not
|
||||
/// consumed.
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("StatementList", "Parsing");
|
||||
let mut items = Vec::new();
|
||||
|
||||
let global_strict = cursor.strict_mode();
|
||||
let mut directive_prologues = self.directive_prologues;
|
||||
let mut strict = self.strict;
|
||||
|
||||
loop {
|
||||
match cursor.peek(0, interner)? {
|
||||
Some(token) if self.break_nodes.contains(token.kind()) => break,
|
||||
None => break,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let item =
|
||||
StatementListItem::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
if directive_prologues {
|
||||
if let ast::StatementListItem::Statement(ast::Statement::Expression(
|
||||
ast::Expression::Literal(ast::expression::literal::Literal::String(string)),
|
||||
)) = &item
|
||||
{
|
||||
if interner.resolve_expect(*string).join(
|
||||
|s| s == "use strict",
|
||||
|g| g == utf16!("use strict"),
|
||||
true,
|
||||
) {
|
||||
cursor.set_strict_mode(true);
|
||||
strict = true;
|
||||
}
|
||||
} else {
|
||||
directive_prologues = false;
|
||||
}
|
||||
}
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
|
||||
items.sort_by(ast::StatementListItem::hoistable_order);
|
||||
|
||||
cursor.set_strict_mode(global_strict);
|
||||
|
||||
Ok(ast::StatementList::new(items, strict))
|
||||
}
|
||||
}
|
||||
|
||||
/// Statement list item parsing
|
||||
///
|
||||
/// A statement list item can either be an statement or a declaration.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-StatementListItem
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct StatementListItem {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
allow_return: AllowReturn,
|
||||
}
|
||||
|
||||
impl StatementListItem {
|
||||
/// Creates a new `StatementListItem` parser.
|
||||
fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for StatementListItem
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = ast::StatementListItem;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("StatementListItem", "Parsing");
|
||||
let tok = cursor.peek(0, interner).or_abrupt()?;
|
||||
|
||||
match *tok.kind() {
|
||||
TokenKind::Keyword((
|
||||
Keyword::Function | Keyword::Class | Keyword::Const | Keyword::Let,
|
||||
_,
|
||||
)) => Declaration::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::StatementListItem::from),
|
||||
TokenKind::Keyword((Keyword::Async, _)) => {
|
||||
match cursor.peek(1, interner)?.map(Token::kind) {
|
||||
Some(TokenKind::Keyword((Keyword::Function, _))) => {
|
||||
Declaration::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::StatementListItem::from)
|
||||
}
|
||||
_ => Statement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::StatementListItem::from),
|
||||
}
|
||||
}
|
||||
_ => Statement::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)
|
||||
.map(ast::StatementListItem::from),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// `ObjectBindingPattern` pattern parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-ObjectBindingPattern
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct ObjectBindingPattern {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl ObjectBindingPattern {
|
||||
/// Creates a new `ObjectBindingPattern` parser.
|
||||
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ObjectBindingPattern
where
    R: Read,
{
    type Output = Vec<ObjectPatternElement>;

    /// Parses an object binding pattern starting at the `{` token.
    ///
    /// Returns the list of pattern elements; consumes everything up to and
    /// including the closing `}`. Errors if the token stream ends abruptly or
    /// an unexpected token is found.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ObjectBindingPattern", "Parsing");

        cursor.expect(
            TokenKind::Punctuator(Punctuator::OpenBlock),
            "object binding pattern",
            interner,
        )?;

        let mut patterns = Vec::new();
        // Property names seen so far; a trailing `...rest` element must
        // exclude them when copying remaining properties.
        let mut property_names = Vec::new();

        loop {
            // Look one token ahead: `ident :` / `keyword :` means the current
            // token is a property name with an explicit binding element.
            let next_token_is_colon = *cursor.peek(1, interner).or_abrupt()?.kind()
                == TokenKind::Punctuator(Punctuator::Colon);
            let token = cursor.peek(0, interner).or_abrupt()?;
            match token.kind() {
                // `}` — end of the pattern.
                TokenKind::Punctuator(Punctuator::CloseBlock) => {
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::CloseBlock),
                        "object binding pattern",
                        interner,
                    )?;
                    return Ok(patterns);
                }
                // `...rest` — must be the final element; only a plain
                // identifier is allowed as the rest target here.
                TokenKind::Punctuator(Punctuator::Spread) => {
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::Spread),
                        "object binding pattern",
                        interner,
                    )?;
                    let ident = BindingIdentifier::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::CloseBlock),
                        "object binding pattern",
                        interner,
                    )?;
                    patterns.push(ObjectPatternElement::RestProperty {
                        ident,
                        excluded_keys: property_names,
                    });
                    return Ok(patterns);
                }
                _ => {
                    // Decide whether the current token starts a
                    // `PropertyName : BindingElement` production.
                    let is_property_name = match token.kind() {
                        TokenKind::Punctuator(Punctuator::OpenBracket)
                        | TokenKind::StringLiteral(_)
                        | TokenKind::NumericLiteral(_) => true,
                        TokenKind::Identifier(_) if next_token_is_colon => true,
                        TokenKind::Keyword(_) if next_token_is_colon => true,
                        _ => false,
                    };

                    if is_property_name {
                        let property_name = PropertyName::new(self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?;
                        // Computed property names have no static name; only
                        // literal names are recorded for rest exclusion.
                        if let Some(name) = property_name.prop_name() {
                            property_names.push(name.into());
                        }
                        cursor.expect(
                            TokenKind::Punctuator(Punctuator::Colon),
                            "object binding pattern",
                            interner,
                        )?;
                        // NOTE(review): if the stream ends right after the
                        // colon, no element is pushed and the loop re-peeks —
                        // the subsequent peek will surface the error.
                        if let Some(peek_token) = cursor.peek(0, interner)? {
                            match peek_token.kind() {
                                // `name: { ... }` — nested object pattern,
                                // with an optional `= default` initializer.
                                TokenKind::Punctuator(Punctuator::OpenBlock) => {
                                    let bindings = Self::new(self.allow_yield, self.allow_await)
                                        .parse(cursor, interner)?;

                                    if let Some(peek_token) = cursor.peek(0, interner)? {
                                        match peek_token.kind() {
                                            TokenKind::Punctuator(Punctuator::Assign) => {
                                                let init = Initializer::new(
                                                    None,
                                                    true,
                                                    self.allow_yield,
                                                    self.allow_await,
                                                )
                                                .parse(cursor, interner)?;
                                                patterns.push(ObjectPatternElement::Pattern {
                                                    name: property_name,
                                                    pattern: bindings.into(),
                                                    default_init: Some(init),
                                                });
                                            }
                                            _ => {
                                                patterns.push(ObjectPatternElement::Pattern {
                                                    name: property_name,
                                                    pattern: bindings.into(),
                                                    default_init: None,
                                                });
                                            }
                                        }
                                    }
                                }
                                // `name: [ ... ]` — nested array pattern,
                                // with an optional `= default` initializer.
                                TokenKind::Punctuator(Punctuator::OpenBracket) => {
                                    let bindings = ArrayBindingPattern::new(
                                        self.allow_yield,
                                        self.allow_await,
                                    )
                                    .parse(cursor, interner)?;

                                    if let Some(peek_token) = cursor.peek(0, interner)? {
                                        match peek_token.kind() {
                                            TokenKind::Punctuator(Punctuator::Assign) => {
                                                let init = Initializer::new(
                                                    None,
                                                    true,
                                                    self.allow_yield,
                                                    self.allow_await,
                                                )
                                                .parse(cursor, interner)?;
                                                patterns.push(ObjectPatternElement::Pattern {
                                                    name: property_name,
                                                    pattern: ArrayPattern::new(bindings.into())
                                                        .into(),
                                                    default_init: Some(init),
                                                });
                                            }
                                            _ => {
                                                patterns.push(ObjectPatternElement::Pattern {
                                                    name: property_name,
                                                    pattern: ArrayPattern::new(bindings.into())
                                                        .into(),
                                                    default_init: None,
                                                });
                                            }
                                        }
                                    }
                                }
                                // `name: ident` — single-name binding.
                                _ => {
                                    // TODO: Currently parses only BindingIdentifier.
                                    // Should parse https://tc39.es/ecma262/#prod-PropertyName
                                    let ident =
                                        BindingIdentifier::new(self.allow_yield, self.allow_await)
                                            .parse(cursor, interner)?;

                                    if let Some(peek_token) = cursor.peek(0, interner)? {
                                        match peek_token.kind() {
                                            TokenKind::Punctuator(Punctuator::Assign) => {
                                                let init = Initializer::new(
                                                    None,
                                                    true,
                                                    self.allow_yield,
                                                    self.allow_await,
                                                )
                                                .parse(cursor, interner)?;
                                                patterns.push(ObjectPatternElement::SingleName {
                                                    ident,
                                                    name: property_name,
                                                    default_init: Some(init),
                                                });
                                            }
                                            _ => {
                                                patterns.push(ObjectPatternElement::SingleName {
                                                    ident,
                                                    name: property_name,
                                                    default_init: None,
                                                });
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        // Shorthand form `{ name }` / `{ name = default }`:
                        // the identifier is both the property name and the
                        // binding target.
                        let name = BindingIdentifier::new(self.allow_yield, self.allow_await)
                            .parse(cursor, interner)?;
                        property_names.push(name);
                        match cursor.peek(0, interner)?.map(Token::kind) {
                            Some(TokenKind::Punctuator(Punctuator::Assign)) => {
                                let init = Initializer::new(
                                    Some(name),
                                    true,
                                    self.allow_yield,
                                    self.allow_await,
                                )
                                .parse(cursor, interner)?;
                                patterns.push(ObjectPatternElement::SingleName {
                                    ident: name,
                                    name: name.sym().into(),
                                    default_init: Some(init),
                                });
                            }
                            _ => {
                                patterns.push(ObjectPatternElement::SingleName {
                                    ident: name,
                                    name: name.sym().into(),
                                    default_init: None,
                                });
                            }
                        }
                    }
                }
            }

            // Consume an element separator if present; the `}` / `...` cases
            // above are the only loop exits.
            if let Some(peek_token) = cursor.peek(0, interner)? {
                if let TokenKind::Punctuator(Punctuator::Comma) = peek_token.kind() {
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::Comma),
                        "object binding pattern",
                        interner,
                    )?;
                }
            }
        }
    }
}
|
||||
|
||||
/// `ArrayBindingPattern` pattern parsing.
///
/// Parses the destructuring pattern `[a, , b = 1, ...rest]` including
/// elisions, nested patterns and a trailing rest element.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-ArrayBindingPattern
#[derive(Debug, Clone, Copy)]
pub(super) struct ArrayBindingPattern {
    // Corresponds to the `[Yield]` grammar parameter of the production.
    allow_yield: AllowYield,
    // Corresponds to the `[Await]` grammar parameter of the production.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl ArrayBindingPattern {
|
||||
/// Creates a new `ArrayBindingPattern` parser.
|
||||
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ArrayBindingPattern
where
    R: Read,
{
    type Output = Vec<ArrayPatternElement>;

    /// Parses an array binding pattern starting at the `[` token.
    ///
    /// Consumes everything up to and including the closing `]` and returns
    /// the parsed elements, including explicit `Elision` entries for holes.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ArrayBindingPattern", "Parsing");

        cursor.expect(
            TokenKind::Punctuator(Punctuator::OpenBracket),
            "array binding pattern",
            interner,
        )?;

        let mut patterns = Vec::new();
        // `true` at the start of the pattern and right after a separator
        // comma; a comma seen while this is `true` denotes an elision (hole).
        let mut last_elision_or_first = true;

        loop {
            match cursor.peek(0, interner).or_abrupt()?.kind() {
                // `]` — end of the pattern.
                TokenKind::Punctuator(Punctuator::CloseBracket) => {
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::CloseBracket),
                        "array binding pattern",
                        interner,
                    )?;
                    return Ok(patterns);
                }
                // Leading or doubled comma — record an elision, or mark that
                // the previous element's separator has been consumed.
                TokenKind::Punctuator(Punctuator::Comma) => {
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::Comma),
                        "array binding pattern",
                        interner,
                    )?;
                    if last_elision_or_first {
                        patterns.push(ArrayPatternElement::Elision);
                    } else {
                        last_elision_or_first = true;
                    }
                    continue;
                }
                // `...rest` — final element; its target may itself be an
                // object pattern, an array pattern or a single identifier.
                TokenKind::Punctuator(Punctuator::Spread) => {
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::Spread),
                        "array binding pattern",
                        interner,
                    )?;

                    match cursor.peek(0, interner).or_abrupt()?.kind() {
                        TokenKind::Punctuator(Punctuator::OpenBlock) => {
                            let bindings =
                                ObjectBindingPattern::new(self.allow_yield, self.allow_await)
                                    .parse(cursor, interner)?;
                            patterns.push(ArrayPatternElement::PatternRest {
                                pattern: bindings.into(),
                            });
                        }
                        TokenKind::Punctuator(Punctuator::OpenBracket) => {
                            let bindings = Self::new(self.allow_yield, self.allow_await)
                                .parse(cursor, interner)?;
                            patterns.push(ArrayPatternElement::PatternRest {
                                pattern: bindings.into(),
                            });
                        }
                        _ => {
                            let rest_property_name =
                                BindingIdentifier::new(self.allow_yield, self.allow_await)
                                    .parse(cursor, interner)?;
                            patterns.push(ArrayPatternElement::SingleNameRest {
                                ident: rest_property_name,
                            });
                        }
                    }

                    // A rest element must be immediately followed by `]`.
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::CloseBracket),
                        "array binding pattern",
                        interner,
                    )?;

                    return Ok(patterns);
                }
                // `{ ... }` — nested object pattern element, with an optional
                // `= default` initializer.
                TokenKind::Punctuator(Punctuator::OpenBlock) => {
                    last_elision_or_first = false;

                    let bindings = ObjectBindingPattern::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;

                    match cursor.peek(0, interner).or_abrupt()?.kind() {
                        TokenKind::Punctuator(Punctuator::Assign) => {
                            let default_init =
                                Initializer::new(None, true, self.allow_yield, self.allow_await)
                                    .parse(cursor, interner)?;
                            patterns.push(ArrayPatternElement::Pattern {
                                pattern: bindings.into(),
                                default_init: Some(default_init),
                            });
                        }
                        _ => {
                            patterns.push(ArrayPatternElement::Pattern {
                                pattern: bindings.into(),
                                default_init: None,
                            });
                        }
                    }
                }
                // `[ ... ]` — nested array pattern element, with an optional
                // `= default` initializer.
                TokenKind::Punctuator(Punctuator::OpenBracket) => {
                    last_elision_or_first = false;

                    let bindings =
                        Self::new(self.allow_yield, self.allow_await).parse(cursor, interner)?;

                    match cursor.peek(0, interner).or_abrupt()?.kind() {
                        TokenKind::Punctuator(Punctuator::Assign) => {
                            let default_init =
                                Initializer::new(None, true, self.allow_yield, self.allow_await)
                                    .parse(cursor, interner)?;
                            patterns.push(ArrayPatternElement::Pattern {
                                pattern: bindings.into(),
                                default_init: Some(default_init),
                            });
                        }
                        _ => {
                            patterns.push(ArrayPatternElement::Pattern {
                                pattern: bindings.into(),
                                default_init: None,
                            });
                        }
                    }
                }
                // Anything else — single identifier element, with an optional
                // `= default` initializer.
                _ => {
                    last_elision_or_first = false;

                    let ident = BindingIdentifier::new(self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;
                    match cursor.peek(0, interner).or_abrupt()?.kind() {
                        TokenKind::Punctuator(Punctuator::Assign) => {
                            let default_init = Initializer::new(
                                Some(ident),
                                true,
                                self.allow_yield,
                                self.allow_await,
                            )
                            .parse(cursor, interner)?;
                            patterns.push(ArrayPatternElement::SingleName {
                                ident,
                                default_init: Some(default_init),
                            });
                        }
                        _ => {
                            patterns.push(ArrayPatternElement::SingleName {
                                ident,
                                default_init: None,
                            });
                        }
                    }
                }
            }

            // Consume the separator comma after a parsed element; a comma
            // here while `last_elision_or_first` is set denotes a hole.
            if let Some(peek_token) = cursor.peek(0, interner)? {
                if let TokenKind::Punctuator(Punctuator::Comma) = peek_token.kind() {
                    cursor.expect(
                        TokenKind::Punctuator(Punctuator::Comma),
                        "array binding pattern",
                        interner,
                    )?;
                    if last_elision_or_first {
                        patterns.push(ArrayPatternElement::Elision);
                    } else {
                        last_elision_or_first = true;
                    }
                }
            }
        }
    }
}
|
||||
67
javascript-engine/external/boa/boa_parser/src/parser/statement/return_stm/mod.rs
vendored
Normal file
67
javascript-engine/external/boa/boa_parser/src/parser/statement/return_stm/mod.rs
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
use crate::{
|
||||
lexer::{Token, TokenKind},
|
||||
parser::{
|
||||
cursor::{Cursor, SemicolonResult},
|
||||
expression::Expression,
|
||||
AllowAwait, AllowYield, ParseResult, TokenParser,
|
||||
},
|
||||
};
|
||||
use boa_ast::{statement::Return, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Return statement parsing
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/return
/// [spec]: https://tc39.es/ecma262/#prod-ReturnStatement
#[derive(Debug, Clone, Copy)]
pub(super) struct ReturnStatement {
    // Corresponds to the `[Yield]` grammar parameter of the production.
    allow_yield: AllowYield,
    // Corresponds to the `[Await]` grammar parameter of the production.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl ReturnStatement {
|
||||
/// Creates a new `ReturnStatement` parser.
|
||||
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ReturnStatement
where
    R: Read,
{
    type Output = Return;

    /// Parses a `return` statement starting at the `return` keyword.
    ///
    /// Handles automatic semicolon insertion: if a (possibly inserted)
    /// semicolon directly follows `return`, the statement has no argument.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ReturnStatement", "Parsing");
        cursor.expect((Keyword::Return, false), "return statement", interner)?;

        // `peek_semicolon` reports where ASI would terminate the statement;
        // an explicit `;` token is consumed, an inserted one is not.
        if let SemicolonResult::Found(tok) = cursor.peek_semicolon(interner)? {
            if tok.map(Token::kind) == Some(&TokenKind::Punctuator(Punctuator::Semicolon)) {
                cursor.advance(interner);
            }

            return Ok(Return::new(None));
        }

        let expr = Expression::new(None, true, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;

        cursor.expect_semicolon("return statement", interner)?;

        Ok(Return::new(Some(expr)))
    }
}
|
||||
221
javascript-engine/external/boa/boa_parser/src/parser/statement/switch/mod.rs
vendored
Normal file
221
javascript-engine/external/boa/boa_parser/src/parser/statement/switch/mod.rs
vendored
Normal file
@@ -0,0 +1,221 @@
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
expression::Expression, statement::StatementList, AllowAwait, AllowReturn, AllowYield,
|
||||
Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use ast::operations::{lexically_declared_names_legacy, var_declared_names};
|
||||
use boa_ast::{self as ast, statement, statement::Switch, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::io::Read;
|
||||
|
||||
/// The possible `TokenKind` which indicate the end of a case statement.
///
/// A case's statement list runs until the next `case`, `default`, or the
/// closing `}` of the case block.
const CASE_BREAK_TOKENS: [TokenKind; 3] = [
    TokenKind::Punctuator(Punctuator::CloseBlock),
    TokenKind::Keyword((Keyword::Case, false)),
    TokenKind::Keyword((Keyword::Default, false)),
];
|
||||
|
||||
/// Switch statement parsing.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/switch
/// [spec]: https://tc39.es/ecma262/#prod-SwitchStatement
#[derive(Debug, Clone, Copy)]
pub(super) struct SwitchStatement {
    // Corresponds to the `[Yield]` grammar parameter of the production.
    allow_yield: AllowYield,
    // Corresponds to the `[Await]` grammar parameter of the production.
    allow_await: AllowAwait,
    // Corresponds to the `[Return]` grammar parameter of the production.
    allow_return: AllowReturn,
}
|
||||
|
||||
impl SwitchStatement {
|
||||
/// Creates a new `SwitchStatement` parser.
|
||||
pub(super) fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for SwitchStatement
where
    R: Read,
{
    type Output = Switch;

    /// Parses a full `switch (expr) { ... }` statement and enforces its
    /// static-semantics early errors on lexical/var declared names.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("SwitchStatement", "Parsing");
        cursor.expect((Keyword::Switch, false), "switch statement", interner)?;
        cursor.expect(Punctuator::OpenParen, "switch statement", interner)?;

        let condition = Expression::new(None, true, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;

        cursor.expect(Punctuator::CloseParen, "switch statement", interner)?;

        // Position of the case block start, reused for early-error reporting.
        let position = cursor.peek(0, interner).or_abrupt()?.span().start();

        let (cases, default) =
            CaseBlock::new(self.allow_yield, self.allow_await, self.allow_return)
                .parse(cursor, interner)?;

        let switch = Switch::new(condition, cases, default);

        // It is a Syntax Error if the LexicallyDeclaredNames of CaseBlock contains any duplicate
        // entries, unless the source text matched by this production is not strict mode code and the
        // duplicate entries are only bound by FunctionDeclarations.
        let mut lexical_names = FxHashMap::default();
        for (name, is_fn) in lexically_declared_names_legacy(&switch) {
            if let Some(is_fn_previous) = lexical_names.insert(name, is_fn) {
                match (cursor.strict_mode(), is_fn, is_fn_previous) {
                    // Non-strict code tolerates duplicates only when both
                    // bindings come from function declarations.
                    (false, true, true) => {}
                    _ => {
                        return Err(Error::general(
                            "lexical name declared multiple times",
                            position,
                        ));
                    }
                }
            }
        }

        // It is a Syntax Error if any element of the LexicallyDeclaredNames of CaseBlock also occurs
        // in the VarDeclaredNames of CaseBlock.
        for name in var_declared_names(&switch) {
            if lexical_names.contains_key(&name) {
                return Err(Error::general(
                    "lexical name declared in var declared names",
                    position,
                ));
            }
        }

        Ok(switch)
    }
}
|
||||
|
||||
/// Switch case block parsing.
///
/// Parses the `{ case ...: ... default: ... }` block of a switch statement.
///
/// More information:
/// - [ECMAScript specification][spec]
///
/// [spec]: https://tc39.es/ecma262/#prod-CaseBlock
#[derive(Debug, Clone, Copy)]
struct CaseBlock {
    // Corresponds to the `[Yield]` grammar parameter of the production.
    allow_yield: AllowYield,
    // Corresponds to the `[Await]` grammar parameter of the production.
    allow_await: AllowAwait,
    // Corresponds to the `[Return]` grammar parameter of the production.
    allow_return: AllowReturn,
}
|
||||
|
||||
impl CaseBlock {
|
||||
/// Creates a new `CaseBlock` parser.
|
||||
fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for CaseBlock
where
    R: Read,
{
    // Parsed case clauses plus the optional `default` clause body.
    type Output = (Box<[statement::Case]>, Option<ast::StatementList>);

    /// Parses the braces-delimited case block of a switch statement.
    ///
    /// Rejects escaped `case`/`default` keywords and a duplicated `default`
    /// clause.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        cursor.expect(Punctuator::OpenBlock, "switch case block", interner)?;

        let mut cases = Vec::new();
        let mut default = None;

        loop {
            let token = cursor.next(interner).or_abrupt()?;
            match token.kind() {
                // The `true` keyword flag marks an escaped keyword spelling,
                // which the grammar forbids here.
                TokenKind::Keyword((Keyword::Case | Keyword::Default, true)) => {
                    return Err(Error::general(
                        "Keyword must not contain escaped characters",
                        token.span().start(),
                    ));
                }
                TokenKind::Keyword((Keyword::Case, false)) => {
                    // Case statement.
                    let cond = Expression::new(None, true, self.allow_yield, self.allow_await)
                        .parse(cursor, interner)?;

                    cursor.expect(Punctuator::Colon, "switch case block", interner)?;

                    // The clause body ends at the next `case`/`default`/`}`.
                    let statement_list = StatementList::new(
                        self.allow_yield,
                        self.allow_await,
                        self.allow_return,
                        &CASE_BREAK_TOKENS,
                        false,
                        false,
                    )
                    .parse(cursor, interner)?;

                    cases.push(statement::Case::new(cond, statement_list));
                }
                TokenKind::Keyword((Keyword::Default, false)) => {
                    if default.is_some() {
                        // If default has already been defined then it cannot be defined again and to do so is an error.
                        return Err(Error::unexpected(
                            token.to_string(interner),
                            token.span(),
                            Some("more than one switch default"),
                        ));
                    }

                    cursor.expect(Punctuator::Colon, "switch default block", interner)?;

                    let statement_list = StatementList::new(
                        self.allow_yield,
                        self.allow_await,
                        self.allow_return,
                        &CASE_BREAK_TOKENS,
                        false,
                        false,
                    )
                    .parse(cursor, interner)?;

                    default = Some(statement_list);
                }
                TokenKind::Punctuator(Punctuator::CloseBlock) => break,
                _ => {
                    return Err(Error::expected(
                        ["case".to_owned(), "default".to_owned(), "}".to_owned()],
                        token.to_string(interner),
                        token.span(),
                        "switch case block",
                    ))
                }
            }
        }

        Ok((cases.into_boxed_slice(), default))
    }
}
|
||||
221
javascript-engine/external/boa/boa_parser/src/parser/statement/switch/tests.rs
vendored
Normal file
221
javascript-engine/external/boa/boa_parser/src/parser/statement/switch/tests.rs
vendored
Normal file
@@ -0,0 +1,221 @@
|
||||
use crate::parser::tests::{check_invalid, check_parser};
|
||||
use boa_ast::{
|
||||
declaration::{LexicalDeclaration, Variable},
|
||||
expression::{access::SimplePropertyAccess, literal::Literal, Call, Identifier},
|
||||
statement::{Break, Case, Switch},
|
||||
Declaration, Expression, Statement,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks parsing malformed switch with no closeblock.
#[test]
fn check_switch_no_closeblock() {
    check_invalid(
        r#"
        let a = 10;
        switch (a) {
            case 10:
                a = 20;
                break;

        "#,
    );
}

/// Checks parsing malformed switch in which a case is started but not finished.
#[test]
fn check_switch_case_unclosed() {
    check_invalid(
        r#"
        let a = 10;
        switch (a) {
            case 10:
                a = 20;

        "#,
    );
}

/// Checks parsing malformed switch with 2 defaults.
#[test]
fn check_switch_two_default() {
    check_invalid(
        r#"
        let a = 10;
        switch (a) {
            default:
                a = 20;
                break;
            default:
                a = 30;
                break;
        }
        "#,
    );
}

/// Checks parsing malformed switch with no expression.
#[test]
fn check_switch_no_expr() {
    check_invalid(
        r#"
        let a = 10;
        switch {
            default:
                a = 20;
                break;
        }
        "#,
    );
}

/// Checks parsing malformed switch with an unknown label.
#[test]
fn check_switch_unknown_label() {
    check_invalid(
        r#"
        let a = 10;
        switch (a) {
            fake:
                a = 20;
                break;
        }
        "#,
    );
}

/// Checks parsing malformed switch with two defaults that are seperated by cases.
#[test]
fn check_switch_seperated_defaults() {
    check_invalid(
        r#"
        let a = 10;
        switch (a) {
            default:
                a = 20;
                break;
            case 10:
                a = 60;
                break;
            default:
                a = 30;
                break;
        }
        "#,
    );
}
|
||||
|
||||
/// Example of JS code <https://jsfiddle.net/zq6jx47h/4/>.
///
/// Verifies that a switch whose tokens are spread over many lines parses to
/// the same AST as the compact form: two cases (5, 10) and a default clause.
#[test]
fn check_separated_switch() {
    let s = r#"
        let a = 10;

        switch

        (a)

        {

        case

        5

        :

        console.log(5);

        break;

        case

        10

        :

        console.log(10);

        break;

        default

        :

        console.log("Default")

        }
        "#;

    let interner = &mut Interner::default();
    let log = interner.get_or_intern_static("log", utf16!("log"));
    let console = interner.get_or_intern_static("console", utf16!("console"));
    let a = interner.get_or_intern_static("a", utf16!("a"));

    check_parser(
        s,
        vec![
            // `let a = 10;`
            Declaration::Lexical(LexicalDeclaration::Let(
                vec![Variable::from_identifier(
                    a.into(),
                    Some(Literal::from(10).into()),
                )]
                .try_into()
                .unwrap(),
            ))
            .into(),
            Statement::Switch(Switch::new(
                Identifier::new(a).into(),
                vec![
                    // `case 5: console.log(5); break;`
                    Case::new(
                        Literal::from(5).into(),
                        vec![
                            Statement::Expression(Expression::from(Call::new(
                                Expression::PropertyAccess(
                                    SimplePropertyAccess::new(Identifier::new(console).into(), log)
                                        .into(),
                                ),
                                vec![Literal::from(5).into()].into(),
                            )))
                            .into(),
                            Statement::Break(Break::new(None)).into(),
                        ]
                        .into(),
                    ),
                    // `case 10: console.log(10); break;`
                    Case::new(
                        Literal::from(10).into(),
                        vec![
                            Statement::Expression(Expression::from(Call::new(
                                Expression::PropertyAccess(
                                    SimplePropertyAccess::new(Identifier::new(console).into(), log)
                                        .into(),
                                ),
                                vec![Literal::from(10).into()].into(),
                            )))
                            .into(),
                            Statement::Break(Break::new(None)).into(),
                        ]
                        .into(),
                    ),
                ]
                .into(),
                // `default: console.log("Default")`
                Some(
                    vec![Statement::Expression(Expression::from(Call::new(
                        Expression::PropertyAccess(
                            SimplePropertyAccess::new(Identifier::new(console).into(), log).into(),
                        ),
                        vec![Literal::from(
                            interner.get_or_intern_static("Default", utf16!("Default")),
                        )
                        .into()]
                        .into(),
                    )))
                    .into()]
                    .into(),
                ),
            ))
            .into(),
        ],
        interner,
    );
}
|
||||
63
javascript-engine/external/boa/boa_parser/src/parser/statement/throw/mod.rs
vendored
Normal file
63
javascript-engine/external/boa/boa_parser/src/parser/statement/throw/mod.rs
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{expression::Expression, AllowAwait, AllowYield, Cursor, ParseResult, TokenParser},
|
||||
};
|
||||
use boa_ast::{statement::Throw, Keyword, Punctuator};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Throw statement parsing.
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/throw
/// [spec]: https://tc39.es/ecma262/#prod-ThrowStatement
#[derive(Debug, Clone, Copy)]
pub(super) struct ThrowStatement {
    // Corresponds to the `[Yield]` grammar parameter of the production.
    allow_yield: AllowYield,
    // Corresponds to the `[Await]` grammar parameter of the production.
    allow_await: AllowAwait,
}
|
||||
|
||||
impl ThrowStatement {
|
||||
/// Creates a new `ThrowStatement` parser.
|
||||
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for ThrowStatement
where
    R: Read,
{
    type Output = Throw;

    /// Parses a `throw` statement starting at the `throw` keyword.
    ///
    /// A line terminator is not allowed between `throw` and its expression;
    /// a trailing semicolon, if present, is consumed.
    fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
        let _timer = Profiler::global().start_event("ThrowStatement", "Parsing");
        cursor.expect((Keyword::Throw, false), "throw statement", interner)?;

        // `throw \n expr` is a syntax error (no ASI rescue here).
        cursor.peek_expect_no_lineterminator(0, "throw statement", interner)?;

        let expr = Expression::new(None, true, self.allow_yield, self.allow_await)
            .parse(cursor, interner)?;
        // Optional explicit semicolon terminator.
        if let Some(tok) = cursor.peek(0, interner)? {
            if tok.kind() == &TokenKind::Punctuator(Punctuator::Semicolon) {
                cursor.advance(interner);
            }
        }

        Ok(Throw::new(expr))
    }
}
|
||||
17
javascript-engine/external/boa/boa_parser/src/parser/statement/throw/tests.rs
vendored
Normal file
17
javascript-engine/external/boa/boa_parser/src/parser/statement/throw/tests.rs
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
use crate::parser::tests::check_parser;
|
||||
use boa_ast::{expression::literal::Literal, statement::Throw, Statement};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
/// Checks that `throw 'error';` parses to a `Throw` statement holding the
/// interned string literal.
#[test]
fn check_throw_parsing() {
    let interner = &mut Interner::default();
    check_parser(
        "throw 'error';",
        vec![Statement::Throw(Throw::new(
            Literal::from(interner.get_or_intern_static("error", utf16!("error"))).into(),
        ))
        .into()],
        interner,
    );
}
|
||||
183
javascript-engine/external/boa/boa_parser/src/parser/statement/try_stm/catch.rs
vendored
Normal file
183
javascript-engine/external/boa/boa_parser/src/parser/statement/try_stm/catch.rs
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
statement::{block::Block, ArrayBindingPattern, BindingIdentifier, ObjectBindingPattern},
|
||||
AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
declaration::Binding,
|
||||
operations::{bound_names, lexically_declared_names, var_declared_names},
|
||||
statement, Keyword, Punctuator,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use rustc_hash::FxHashSet;
|
||||
use std::io::Read;
|
||||
|
||||
/// Catch parsing
///
/// More information:
/// - [MDN documentation][mdn]
/// - [ECMAScript specification][spec]
///
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/try...catch
/// [spec]: https://tc39.es/ecma262/#prod-Catch
#[derive(Debug, Clone, Copy)]
pub(super) struct Catch {
    // Corresponds to the `[Yield]` grammar parameter of the production.
    allow_yield: AllowYield,
    // Corresponds to the `[Await]` grammar parameter of the production.
    allow_await: AllowAwait,
    // Corresponds to the `[Return]` grammar parameter of the production.
    allow_return: AllowReturn,
}
|
||||
|
||||
impl Catch {
|
||||
/// Creates a new `Catch` block parser.
|
||||
pub(super) fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for Catch
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = statement::Catch;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("Catch", "Parsing");
|
||||
cursor.expect((Keyword::Catch, false), "try statement", interner)?;
|
||||
let position = cursor.peek(0, interner).or_abrupt()?.span().start();
|
||||
let catch_param = if cursor.next_if(Punctuator::OpenParen, interner)?.is_some() {
|
||||
let catch_param =
|
||||
CatchParameter::new(self.allow_yield, self.allow_await).parse(cursor, interner)?;
|
||||
|
||||
cursor.expect(Punctuator::CloseParen, "catch in try statement", interner)?;
|
||||
Some(catch_param)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// It is a Syntax Error if BoundNames of CatchParameter contains any duplicate elements.
|
||||
// https://tc39.es/ecma262/#sec-try-statement-static-semantics-early-errors
|
||||
let bound_names: Option<FxHashSet<_>> = catch_param
|
||||
.as_ref()
|
||||
.map(|binding| {
|
||||
let mut set = FxHashSet::default();
|
||||
for ident in bound_names(binding) {
|
||||
if !set.insert(ident) {
|
||||
return Err(Error::general(
|
||||
"duplicate catch parameter identifier",
|
||||
position,
|
||||
));
|
||||
}
|
||||
}
|
||||
Ok(set)
|
||||
})
|
||||
.transpose()?;
|
||||
|
||||
let position = cursor.peek(0, interner).or_abrupt()?.span().start();
|
||||
let catch_block = Block::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
// It is a Syntax Error if any element of the BoundNames of CatchParameter also occurs in the LexicallyDeclaredNames of Block.
|
||||
// It is a Syntax Error if any element of the BoundNames of CatchParameter also occurs in the VarDeclaredNames of Block unless CatchParameter is CatchParameter : BindingIdentifier .
|
||||
// https://tc39.es/ecma262/#sec-try-statement-static-semantics-early-errors
|
||||
// https://tc39.es/ecma262/#sec-variablestatements-in-catch-blocks
|
||||
if let Some(bound_names) = bound_names {
|
||||
for name in lexically_declared_names(&catch_block) {
|
||||
if bound_names.contains(&name) {
|
||||
return Err(Error::general(
|
||||
"catch parameter identifier declared in catch body",
|
||||
position,
|
||||
));
|
||||
}
|
||||
}
|
||||
if !matches!(&catch_param, Some(Binding::Identifier(_))) {
|
||||
for name in var_declared_names(&catch_block) {
|
||||
if bound_names.contains(&name) {
|
||||
return Err(Error::general(
|
||||
"catch parameter identifier declared in catch body",
|
||||
position,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let catch_node = statement::Catch::new(catch_param, catch_block);
|
||||
Ok(catch_node)
|
||||
}
|
||||
}
|
||||
|
||||
/// `CatchParameter` parsing
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/try...catch
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-CatchParameter
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct CatchParameter {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl CatchParameter {
|
||||
/// Creates a new `CatchParameter` parser.
|
||||
pub(super) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for CatchParameter
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Binding;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let token = cursor.peek(0, interner).or_abrupt()?;
|
||||
|
||||
match token.kind() {
|
||||
TokenKind::Punctuator(Punctuator::OpenBlock) => {
|
||||
let pat = ObjectBindingPattern::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
Ok(Binding::Pattern(pat.into()))
|
||||
}
|
||||
TokenKind::Punctuator(Punctuator::OpenBracket) => {
|
||||
let pat = ArrayBindingPattern::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?;
|
||||
Ok(Binding::Pattern(pat.into()))
|
||||
}
|
||||
TokenKind::Identifier(_) => {
|
||||
let ident = BindingIdentifier::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?;
|
||||
Ok(Binding::Identifier(ident))
|
||||
}
|
||||
_ => Err(Error::unexpected(
|
||||
token.to_string(interner),
|
||||
token.span(),
|
||||
None,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
55
javascript-engine/external/boa/boa_parser/src/parser/statement/try_stm/finally.rs
vendored
Normal file
55
javascript-engine/external/boa/boa_parser/src/parser/statement/try_stm/finally.rs
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
use crate::parser::{
|
||||
statement::block::Block, AllowAwait, AllowReturn, AllowYield, Cursor, ParseResult, TokenParser,
|
||||
};
|
||||
use boa_ast::{statement, Keyword};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Finally parsing
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/try...catch
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-Finally
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct Finally {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
allow_return: AllowReturn,
|
||||
}
|
||||
|
||||
impl Finally {
|
||||
/// Creates a new `Finally` block parser.
|
||||
pub(super) fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for Finally
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = statement::Finally;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("Finally", "Parsing");
|
||||
cursor.expect((Keyword::Finally, false), "try statement", interner)?;
|
||||
Ok(
|
||||
Block::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)?
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
}
|
||||
123
javascript-engine/external/boa/boa_parser/src/parser/statement/try_stm/mod.rs
vendored
Normal file
123
javascript-engine/external/boa/boa_parser/src/parser/statement/try_stm/mod.rs
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
mod catch;
|
||||
mod finally;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use self::{catch::Catch, finally::Finally};
|
||||
use super::block::Block;
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{AllowAwait, AllowReturn, AllowYield, Cursor, OrAbrupt, ParseResult, TokenParser},
|
||||
Error,
|
||||
};
|
||||
use boa_ast::{
|
||||
statement::{ErrorHandler, Try},
|
||||
Keyword,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::io::Read;
|
||||
|
||||
/// Try...catch statement parsing
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/try...catch
|
||||
/// [spec]: https://tc39.es/ecma262/#sec-try-statement
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(super) struct TryStatement {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
allow_return: AllowReturn,
|
||||
}
|
||||
|
||||
impl TryStatement {
|
||||
/// Creates a new `TryStatement` parser.
|
||||
pub(super) fn new<Y, A, R>(allow_yield: Y, allow_await: A, allow_return: R) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
R: Into<AllowReturn>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
allow_return: allow_return.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for TryStatement
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Try;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("TryStatement", "Parsing");
|
||||
// TRY
|
||||
cursor.expect((Keyword::Try, false), "try statement", interner)?;
|
||||
|
||||
let try_clause = Block::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
let next_token = cursor.peek(0, interner).or_abrupt()?;
|
||||
match next_token.kind() {
|
||||
TokenKind::Keyword((Keyword::Catch | Keyword::Finally, true)) => {
|
||||
return Err(Error::general(
|
||||
"Keyword must not contain escaped characters",
|
||||
next_token.span().start(),
|
||||
));
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Catch | Keyword::Finally, false)) => {}
|
||||
_ => {
|
||||
return Err(Error::expected(
|
||||
["catch".to_owned(), "finally".to_owned()],
|
||||
next_token.to_string(interner),
|
||||
next_token.span(),
|
||||
"try statement",
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let catch = if next_token.kind() == &TokenKind::Keyword((Keyword::Catch, false)) {
|
||||
Some(
|
||||
Catch::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let next_token = cursor.peek(0, interner)?;
|
||||
let finally = if let Some(token) = next_token {
|
||||
match token.kind() {
|
||||
TokenKind::Keyword((Keyword::Finally, true)) => {
|
||||
return Err(Error::general(
|
||||
"Keyword must not contain escaped characters",
|
||||
token.span().start(),
|
||||
));
|
||||
}
|
||||
TokenKind::Keyword((Keyword::Finally, false)) => Some(
|
||||
Finally::new(self.allow_yield, self.allow_await, self.allow_return)
|
||||
.parse(cursor, interner)?,
|
||||
),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let handler = match (catch, finally) {
|
||||
(Some(catch), None) => ErrorHandler::Catch(catch),
|
||||
(None, Some(finally)) => ErrorHandler::Finally(finally),
|
||||
(Some(catch), Some(finally)) => ErrorHandler::Full(catch, finally),
|
||||
(None, None) => unreachable!(),
|
||||
};
|
||||
|
||||
Ok(Try::new(try_clause, handler))
|
||||
}
|
||||
}
|
||||
299
javascript-engine/external/boa/boa_parser/src/parser/statement/try_stm/tests.rs
vendored
Normal file
299
javascript-engine/external/boa/boa_parser/src/parser/statement/try_stm/tests.rs
vendored
Normal file
@@ -0,0 +1,299 @@
|
||||
use std::convert::TryInto;
|
||||
|
||||
use crate::parser::tests::{check_invalid, check_parser};
|
||||
use boa_ast::{
|
||||
declaration::{VarDeclaration, Variable},
|
||||
expression::{literal::Literal, Identifier},
|
||||
pattern::{ArrayPatternElement, ObjectPatternElement, Pattern},
|
||||
property::PropertyName,
|
||||
statement::{Block, Catch, ErrorHandler, Finally, Try},
|
||||
Statement, StatementListItem,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_macros::utf16;
|
||||
|
||||
#[test]
|
||||
fn check_inline_with_empty_try_catch() {
|
||||
let interner = &mut Interner::default();
|
||||
check_parser(
|
||||
"try { } catch(e) {}",
|
||||
vec![Statement::Try(Try::new(
|
||||
Block::default(),
|
||||
ErrorHandler::Catch(Catch::new(
|
||||
Some(Identifier::from(interner.get_or_intern_static("e", utf16!("e"))).into()),
|
||||
Block::default(),
|
||||
)),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_with_var_decl_inside_try() {
|
||||
let interner = &mut Interner::default();
|
||||
check_parser(
|
||||
"try { var x = 1; } catch(e) {}",
|
||||
vec![Statement::Try(Try::new(
|
||||
vec![Statement::Var(VarDeclaration(
|
||||
vec![Variable::from_identifier(
|
||||
interner.get_or_intern_static("x", utf16!("x")).into(),
|
||||
Some(Literal::from(1).into()),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()]
|
||||
.into(),
|
||||
ErrorHandler::Catch(Catch::new(
|
||||
Some(Identifier::from(interner.get_or_intern_static("e", utf16!("e"))).into()),
|
||||
Block::default(),
|
||||
)),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_with_var_decl_inside_catch() {
|
||||
let interner = &mut Interner::default();
|
||||
check_parser(
|
||||
"try { var x = 1; } catch(e) { var x = 1; }",
|
||||
vec![Statement::Try(Try::new(
|
||||
vec![Statement::Var(VarDeclaration(
|
||||
vec![Variable::from_identifier(
|
||||
interner.get_or_intern_static("x", utf16!("x")).into(),
|
||||
Some(Literal::from(1).into()),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()]
|
||||
.into(),
|
||||
ErrorHandler::Catch(Catch::new(
|
||||
Some(Identifier::from(interner.get_or_intern_static("e", utf16!("e"))).into()),
|
||||
vec![Statement::Var(VarDeclaration(
|
||||
vec![Variable::from_identifier(
|
||||
interner.get_or_intern_static("x", utf16!("x")).into(),
|
||||
Some(Literal::from(1).into()),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()]
|
||||
.into(),
|
||||
)),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_with_empty_try_catch_finally() {
|
||||
let interner = &mut Interner::default();
|
||||
check_parser(
|
||||
"try {} catch(e) {} finally {}",
|
||||
vec![Statement::Try(Try::new(
|
||||
Block::default(),
|
||||
ErrorHandler::Full(
|
||||
Catch::new(
|
||||
Some(Identifier::from(interner.get_or_intern_static("e", utf16!("e"))).into()),
|
||||
Block::default(),
|
||||
),
|
||||
Finally::from(Block::default()),
|
||||
),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_with_empty_try_finally() {
|
||||
check_parser(
|
||||
"try {} finally {}",
|
||||
vec![Statement::Try(Try::new(
|
||||
Block::default(),
|
||||
ErrorHandler::Finally(Finally::from(Block::default())),
|
||||
))
|
||||
.into()],
|
||||
&mut Interner::default(),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_with_empty_try_var_decl_in_finally() {
|
||||
let interner = &mut Interner::default();
|
||||
check_parser(
|
||||
"try {} finally { var x = 1; }",
|
||||
vec![Statement::Try(Try::new(
|
||||
Block::default(),
|
||||
ErrorHandler::Finally(Finally::from(Block::from(vec![
|
||||
StatementListItem::Statement(Statement::Var(VarDeclaration(
|
||||
vec![Variable::from_identifier(
|
||||
interner.get_or_intern_static("x", utf16!("x")).into(),
|
||||
Some(Literal::from(1).into()),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))),
|
||||
]))),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_empty_try_paramless_catch() {
|
||||
let interner = &mut Interner::default();
|
||||
check_parser(
|
||||
"try {} catch { var x = 1; }",
|
||||
vec![Statement::Try(Try::new(
|
||||
Block::default(),
|
||||
ErrorHandler::Catch(Catch::new(
|
||||
None,
|
||||
vec![Statement::Var(VarDeclaration(
|
||||
vec![Variable::from_identifier(
|
||||
interner.get_or_intern_static("x", utf16!("x")).into(),
|
||||
Some(Literal::from(1).into()),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()]
|
||||
.into(),
|
||||
)),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_with_binding_pattern_object() {
|
||||
let interner = &mut Interner::default();
|
||||
let a = interner.get_or_intern_static("a", utf16!("a"));
|
||||
check_parser(
|
||||
"try {} catch ({ a, b: c }) {}",
|
||||
vec![Statement::Try(Try::new(
|
||||
Block::default(),
|
||||
ErrorHandler::Catch(Catch::new(
|
||||
Some(
|
||||
Pattern::from(vec![
|
||||
ObjectPatternElement::SingleName {
|
||||
ident: a.into(),
|
||||
name: PropertyName::Literal(a),
|
||||
default_init: None,
|
||||
},
|
||||
ObjectPatternElement::SingleName {
|
||||
ident: interner.get_or_intern_static("c", utf16!("c")).into(),
|
||||
name: PropertyName::Literal(
|
||||
interner.get_or_intern_static("b", utf16!("b")),
|
||||
),
|
||||
default_init: None,
|
||||
},
|
||||
])
|
||||
.into(),
|
||||
),
|
||||
Block::default(),
|
||||
)),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_with_binding_pattern_array() {
|
||||
let interner = &mut Interner::default();
|
||||
check_parser(
|
||||
"try {} catch ([a, b]) {}",
|
||||
vec![Statement::Try(Try::new(
|
||||
Block::from(vec![]),
|
||||
ErrorHandler::Catch(Catch::new(
|
||||
Some(
|
||||
Pattern::from(vec![
|
||||
ArrayPatternElement::SingleName {
|
||||
ident: interner.get_or_intern_static("a", utf16!("a")).into(),
|
||||
default_init: None,
|
||||
},
|
||||
ArrayPatternElement::SingleName {
|
||||
ident: interner.get_or_intern_static("b", utf16!("b")).into(),
|
||||
default_init: None,
|
||||
},
|
||||
])
|
||||
.into(),
|
||||
),
|
||||
Block::default(),
|
||||
)),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_catch_with_var_redeclaration() {
|
||||
let interner = &mut Interner::default();
|
||||
check_parser(
|
||||
"try {} catch(e) { var e = 'oh' }",
|
||||
vec![Statement::Try(Try::new(
|
||||
Block::from(vec![]),
|
||||
ErrorHandler::Catch(Catch::new(
|
||||
Some(Identifier::new(interner.get_or_intern_static("e", utf16!("e"))).into()),
|
||||
vec![Statement::Var(VarDeclaration(
|
||||
vec![Variable::from_identifier(
|
||||
interner.get_or_intern_static("e", utf16!("e")).into(),
|
||||
Some(
|
||||
Literal::from(interner.get_or_intern_static("oh", utf16!("oh"))).into(),
|
||||
),
|
||||
)]
|
||||
.try_into()
|
||||
.unwrap(),
|
||||
))
|
||||
.into()]
|
||||
.into(),
|
||||
)),
|
||||
))
|
||||
.into()],
|
||||
interner,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_invalid_catch() {
|
||||
check_invalid("try {} catch");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_invalid_catch_without_closing_paren() {
|
||||
check_invalid("try {} catch(e {}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_inline_invalid_catch_parameter() {
|
||||
check_invalid("try {} catch(1) {}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_invalid_try_no_catch_finally() {
|
||||
check_invalid("try {} let a = 10;");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_invalid_catch_with_empty_paren() {
|
||||
check_invalid("try {} catch() {}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_invalid_catch_with_duplicate_params() {
|
||||
check_invalid("try {} catch({ a, b: a }) {}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_invalid_catch_with_lexical_redeclaration() {
|
||||
check_invalid("try {} catch(e) { let e = 'oh' }");
|
||||
}
|
||||
227
javascript-engine/external/boa/boa_parser/src/parser/statement/variable/mod.rs
vendored
Normal file
227
javascript-engine/external/boa/boa_parser/src/parser/statement/variable/mod.rs
vendored
Normal file
@@ -0,0 +1,227 @@
|
||||
//! Variable statement parsing.
|
||||
|
||||
use crate::{
|
||||
lexer::TokenKind,
|
||||
parser::{
|
||||
cursor::Cursor,
|
||||
expression::Initializer,
|
||||
statement::{ArrayBindingPattern, BindingIdentifier, ObjectBindingPattern},
|
||||
AllowAwait, AllowIn, AllowYield, OrAbrupt, ParseResult, TokenParser,
|
||||
},
|
||||
};
|
||||
use boa_ast::{
|
||||
declaration::{VarDeclaration, Variable},
|
||||
Keyword, Punctuator, Statement,
|
||||
};
|
||||
use boa_interner::Interner;
|
||||
use boa_profiler::Profiler;
|
||||
use std::{convert::TryInto, io::Read};
|
||||
|
||||
/// Variable statement parsing.
|
||||
///
|
||||
/// A variable statement contains the `var` keyword.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/var
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-VariableStatement
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser::statement) struct VariableStatement {
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl VariableStatement {
|
||||
/// Creates a new `VariableStatement` parser.
|
||||
pub(in crate::parser::statement) fn new<Y, A>(allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for VariableStatement
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Statement;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let _timer = Profiler::global().start_event("VariableStatement", "Parsing");
|
||||
cursor.expect((Keyword::Var, false), "variable statement", interner)?;
|
||||
|
||||
let decl_list = VariableDeclarationList::new(true, self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
cursor.expect_semicolon("variable statement", interner)?;
|
||||
|
||||
Ok(decl_list.into())
|
||||
}
|
||||
}
|
||||
|
||||
/// Variable declaration list parsing.
|
||||
///
|
||||
/// More information:
|
||||
/// - [MDN documentation][mdn]
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/var
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-VariableDeclarationList
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub(in crate::parser::statement) struct VariableDeclarationList {
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl VariableDeclarationList {
|
||||
/// Creates a new `VariableDeclarationList` parser.
|
||||
pub(in crate::parser::statement) fn new<I, Y, A>(
|
||||
allow_in: I,
|
||||
allow_yield: Y,
|
||||
allow_await: A,
|
||||
) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for VariableDeclarationList
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = VarDeclaration;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let mut list = Vec::new();
|
||||
|
||||
loop {
|
||||
list.push(
|
||||
VariableDeclaration::new(self.allow_in, self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?,
|
||||
);
|
||||
|
||||
if cursor.next_if(Punctuator::Comma, interner)?.is_none() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(VarDeclaration(list.try_into().expect(
|
||||
"`VariableDeclaration` must parse at least one variable",
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads an individual variable declaration.
|
||||
///
|
||||
/// More information:
|
||||
/// - [ECMAScript specification][spec]
|
||||
///
|
||||
/// [spec]: https://tc39.es/ecma262/#prod-VariableDeclaration
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct VariableDeclaration {
|
||||
allow_in: AllowIn,
|
||||
allow_yield: AllowYield,
|
||||
allow_await: AllowAwait,
|
||||
}
|
||||
|
||||
impl VariableDeclaration {
|
||||
/// Creates a new `VariableDeclaration` parser.
|
||||
fn new<I, Y, A>(allow_in: I, allow_yield: Y, allow_await: A) -> Self
|
||||
where
|
||||
I: Into<AllowIn>,
|
||||
Y: Into<AllowYield>,
|
||||
A: Into<AllowAwait>,
|
||||
{
|
||||
Self {
|
||||
allow_in: allow_in.into(),
|
||||
allow_yield: allow_yield.into(),
|
||||
allow_await: allow_await.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R> TokenParser<R> for VariableDeclaration
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
type Output = Variable;
|
||||
|
||||
fn parse(self, cursor: &mut Cursor<R>, interner: &mut Interner) -> ParseResult<Self::Output> {
|
||||
let peek_token = cursor.peek(0, interner).or_abrupt()?;
|
||||
|
||||
match peek_token.kind() {
|
||||
TokenKind::Punctuator(Punctuator::OpenBlock) => {
|
||||
let bindings = ObjectBindingPattern::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
let init = if cursor
|
||||
.peek(0, interner)?
|
||||
.filter(|t| *t.kind() == TokenKind::Punctuator(Punctuator::Assign))
|
||||
.is_some()
|
||||
{
|
||||
Some(
|
||||
Initializer::new(None, self.allow_in, self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(Variable::from_pattern(bindings.into(), init))
|
||||
}
|
||||
TokenKind::Punctuator(Punctuator::OpenBracket) => {
|
||||
let bindings = ArrayBindingPattern::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
let init = if cursor
|
||||
.peek(0, interner)?
|
||||
.filter(|t| *t.kind() == TokenKind::Punctuator(Punctuator::Assign))
|
||||
.is_some()
|
||||
{
|
||||
Some(
|
||||
Initializer::new(None, self.allow_in, self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(Variable::from_pattern(bindings.into(), init))
|
||||
}
|
||||
_ => {
|
||||
let ident = BindingIdentifier::new(self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?;
|
||||
|
||||
let init = if cursor
|
||||
.peek(0, interner)?
|
||||
.filter(|t| *t.kind() == TokenKind::Punctuator(Punctuator::Assign))
|
||||
.is_some()
|
||||
{
|
||||
Some(
|
||||
Initializer::new(Some(ident), true, self.allow_yield, self.allow_await)
|
||||
.parse(cursor, interner)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(Variable::from_identifier(ident, init))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
42
javascript-engine/external/boa/boa_parser/src/parser/tests/format/declaration.rs
vendored
Normal file
42
javascript-engine/external/boa/boa_parser/src/parser/tests/format/declaration.rs
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
use crate::parser::tests::format::test_formatting;
|
||||
|
||||
#[test]
|
||||
fn binding_pattern() {
|
||||
test_formatting(
|
||||
r#"
|
||||
var { } = {
|
||||
o: "1",
|
||||
};
|
||||
var { o_v1 } = {
|
||||
o_v1: "1",
|
||||
};
|
||||
var { o_v2 = "1" } = {
|
||||
o_v2: "2",
|
||||
};
|
||||
var { a : o_v3 = "1" } = {
|
||||
a: "2",
|
||||
};
|
||||
var { ... o_rest_v1 } = {
|
||||
a: "2",
|
||||
};
|
||||
var { o_v4, o_v5, o_v6 = "1", a : o_v7 = "1", ... o_rest_v2 } = {
|
||||
o_v4: "1",
|
||||
o_v5: "1",
|
||||
};
|
||||
var [] = [];
|
||||
var [ , ] = [];
|
||||
var [ a_v1 ] = [1, 2, 3];
|
||||
var [ a_v2, a_v3 ] = [1, 2, 3];
|
||||
var [ a_v2, , a_v3 ] = [1, 2, 3];
|
||||
var [ ... a_rest_v1 ] = [1, 2, 3];
|
||||
var [ a_v4, , ... a_rest_v2 ] = [1, 2, 3];
|
||||
var [ { a_v5 } ] = [{
|
||||
a_v5: 1,
|
||||
}, {
|
||||
a_v5: 2,
|
||||
}, {
|
||||
a_v5: 3,
|
||||
}];
|
||||
"#,
|
||||
);
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user