Skip to content

Commit

Permalink
cargo fmt
Browse files Browse the repository at this point in the history
  • Loading branch information
vcfxb committed Jul 19, 2024
1 parent cbba2a1 commit 6121689
Show file tree
Hide file tree
Showing 5 changed files with 34 additions and 22 deletions.
18 changes: 12 additions & 6 deletions wright/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,14 @@
//! [AST]: crate::ast
//! [Token]: crate::lexer::token::Token
use std::collections::VecDeque;
use error::ParserError;
use std::collections::VecDeque;

use crate::{lexer::token::{Token, TokenTy}, source_tracking::fragment::Fragment};
use super::lexer::Lexer;
use crate::{
lexer::token::{Token, TokenTy},
source_tracking::fragment::Fragment,
};

pub mod error;
mod identifier;
Expand All @@ -32,7 +35,9 @@ impl Parser {

/// Get the next [Token] from this [Parser]. This may be a clone of a token that's already been peeked.
pub fn next(&mut self) -> Option<Token> {
    // Drain any previously-peeked token first; only pull a fresh token
    // from the lexer when the lookahead queue is empty.
    self.lookahead
        .pop_front()
        .or_else(|| self.lexer.next_token())
}

/// Peek at the next token from the [Lexer] (cached in the lookahead queue if peeked before).
Expand All @@ -54,8 +59,8 @@ impl Parser {
&self.lexer
}

/// Lookahead `k` [Token]s.
///
/// Lookahead `k` [Token]s.
///
/// If `k == 0` then this is effectively peeking at the next [Token] from the wrapped [Lexer].
pub fn lookahead(&mut self, k: usize) -> Option<&Token> {
while self.lookahead.len() <= k {
Expand All @@ -68,7 +73,8 @@ impl Parser {
/// Get the next [Token] from this parser if its [Token::variant] is the given `token_ty`.
pub fn next_if_is(&mut self, token_ty: TokenTy) -> Option<Token> {
    // Peeking successfully first means that the lookahead vec will never be empty here.
    (self.peek()?.variant == token_ty)
        // SAFETY: `peek` returned `Some` above, so the lookahead queue
        // holds at least one token and `pop_front` cannot yield `None`.
        .then(|| unsafe { self.lookahead.pop_front().unwrap_unchecked() })
}

/// Peek at the next [Token], remove it if it's a [TokenTy::Whitespace].
Expand Down
12 changes: 7 additions & 5 deletions wright/src/parser/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,12 @@ impl ParserErrorKind {
match self {
ExpectedIdentifier => "expected identifier",
ExpectedPath => "expected path or identifier",
UnterminatedMultilineCommentEncountered => "encountered unterminated multiline comment while parsing",
UnterminatedStringLiteralEncountered => "encountered unterminated string literal while parsing",
UnterminatedMultilineCommentEncountered => {
"encountered unterminated multiline comment while parsing"
}
UnterminatedStringLiteralEncountered => {
"encountered unterminated string literal while parsing"
}
}
}

Expand Down Expand Up @@ -55,9 +59,7 @@ pub struct ParserError {
impl ParserError {
/// Turn this parser error into a full blown compiler error.
pub fn as_diagnostic(self) -> Diagnostic {
let description = self
.kind
.describe();
let description = self.kind.describe();

let mut diagnostic = Diagnostic::error()
.with_code(self.kind.error_code_string())
Expand Down
5 changes: 2 additions & 3 deletions wright/src/parser/identifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,8 @@ impl Parse for Identifier {
kind: ParserErrorKind::ExpectedIdentifier,
location: parser.lexer.remaining.clone(),
help: Some("found end of source".into()),
})

}
}),
},
}
}
}
Expand Down
10 changes: 4 additions & 6 deletions wright/src/parser/path.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,12 @@ impl Parse for Path {

/// Parse the first (and possibly only) [Identifier] in the [Path].
fn parse_head(parser: &mut Parser) -> Result<Identifier, ParserError> {
    // Re-brand an identifier failure as a path failure so the caller's
    // diagnostic reads "expected path" rather than "expected identifier".
    Identifier::parse(parser).map_err(|mut err| {
        err.kind = ParserErrorKind::ExpectedPath;
        err
    })
}


// /// Parse a path (`head::tail`) in source code.
// pub fn parse_path<'src>(parser_state: &mut ParserState<'src>) -> NodeParserResult<Path<'src>> {
// // Get the initial index to make metadata at the end.
Expand Down
11 changes: 9 additions & 2 deletions wright/src/parser/whitespace.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,21 @@
//! Utilities for parsing through whitespace.
use crate::lexer::{token::{Token, TokenTy}, Lexer};
use crate::lexer::{
token::{Token, TokenTy},
Lexer,
};
use std::mem;

/// Consume and ignore a [TokenTy::Whitespace] from the front of the lexer.
/// If there is not one, do nothing.
pub fn optional_whitespace(lexer: &mut Lexer) {
    // Work on a fork so the caller's lexer is only advanced when
    // whitespace is actually found at the front.
    let mut fork = lexer.fork();

    if let Some(Token {
        variant: TokenTy::Whitespace,
        ..
    }) = fork.next_token()
    {
        // Replace the original lexer with the fork.
        let _ = mem::replace(lexer, fork);
    }
Expand Down

0 comments on commit 6121689

Please sign in to comment.