summaryrefslogtreecommitdiff
path: root/src/parse
diff options
context:
space:
mode:
Diffstat (limited to 'src/parse')
-rw-r--r--src/parse/mod.rs275
-rw-r--r--src/parse/parser.rs73
-rw-r--r--src/parse/resolve.rs8
-rw-r--r--src/parse/tokens.rs16
4 files changed, 176 insertions, 196 deletions
diff --git a/src/parse/mod.rs b/src/parse/mod.rs
index dc769183..0425f824 100644
--- a/src/parse/mod.rs
+++ b/src/parse/mod.rs
@@ -30,15 +30,14 @@ fn markup(p: &mut Parser) {
/// Parse markup that stays right of the given column.
fn markup_indented(p: &mut Parser, column: usize) {
- // TODO this is broken
p.eat_while(|t| match t {
- NodeKind::Space(n) => n == 0,
+ NodeKind::Space(n) => *n == 0,
NodeKind::LineComment | NodeKind::BlockComment => true,
_ => false,
});
markup_while(p, false, &mut |p| match p.peek() {
- Some(NodeKind::Space(n)) if n >= 1 => p.column(p.next_end()) >= column,
+ Some(NodeKind::Space(n)) if *n >= 1 => p.column(p.next_end()) >= column,
_ => true,
})
}
@@ -64,125 +63,119 @@ where
/// Parse a markup node.
fn markup_node(p: &mut Parser, at_start: &mut bool) {
- if let Some(token) = p.peek() {
- match token {
- // Whitespace.
- NodeKind::Space(newlines) => {
- *at_start |= newlines > 0;
-
- if newlines < 2 {
- p.eat();
- } else {
- p.convert(NodeKind::Parbreak);
- }
- }
+ let token = match p.peek() {
+ Some(t) => t,
+ None => return,
+ };
- // Text.
- NodeKind::UnicodeEscape(u) => {
- if !u.terminated {
- p.convert(NodeKind::Error(
- ErrorPosition::End,
- "expected closing brace".into(),
- ));
- p.unsuccessful();
- return;
- }
-
- if u.character.is_none() {
- let src = p.peek_src();
- p.convert(NodeKind::Error(
- ErrorPosition::Full,
- "invalid unicode escape sequence".into(),
- ));
- p.start();
- p.end(NodeKind::Text(src.into()));
- return;
- }
+ match token {
+ // Whitespace.
+ NodeKind::Space(newlines) => {
+ *at_start |= *newlines > 0;
+ if *newlines < 2 {
p.eat();
+ } else {
+ p.convert(NodeKind::Parbreak);
}
- NodeKind::Raw(r) => {
- if !r.terminated {
- p.convert(NodeKind::Error(
- ErrorPosition::End,
- "expected backtick(s)".into(),
- ));
- p.unsuccessful();
- return;
- }
+ }
- p.eat();
+ // Text and markup.
+ NodeKind::Text(_)
+ | NodeKind::EnDash
+ | NodeKind::EmDash
+ | NodeKind::NonBreakingSpace
+ | NodeKind::Emph
+ | NodeKind::Strong
+ | NodeKind::Linebreak => p.eat(),
+
+ NodeKind::UnicodeEscape(u) => {
+ if !u.terminated {
+ p.convert(NodeKind::Error(
+ ErrorPosition::End,
+ "expected closing brace".into(),
+ ));
+ p.unsuccessful();
+ return;
}
- NodeKind::Text(_)
- | NodeKind::EnDash
- | NodeKind::EmDash
- | NodeKind::NonBreakingSpace => {
- p.eat();
+
+ if u.character.is_none() {
+ let src = p.peek_src();
+ p.convert(NodeKind::Error(
+ ErrorPosition::Full,
+ "invalid unicode escape sequence".into(),
+ ));
+ p.start();
+ p.end(NodeKind::Text(src.into()));
+ return;
}
- // Markup.
- NodeKind::Emph | NodeKind::Strong | NodeKind::Linebreak => {
- p.eat();
+ p.eat();
+ }
+ NodeKind::Raw(r) => {
+ if !r.terminated {
+ p.convert(NodeKind::Error(
+ ErrorPosition::End,
+ "expected backtick(s)".into(),
+ ));
+ p.unsuccessful();
+ return;
}
- NodeKind::Eq if *at_start => heading(p),
- NodeKind::ListBullet if *at_start => list_node(p),
- NodeKind::EnumNumbering(_) if *at_start => enum_node(p),
+ p.eat();
+ }
- // Line-based markup that is not currently at the start of the line.
- NodeKind::Eq | NodeKind::ListBullet | NodeKind::EnumNumbering(_) => {
- p.convert(NodeKind::Text(p.peek_src().into()))
- }
+ NodeKind::Eq if *at_start => heading(p),
+ NodeKind::ListBullet if *at_start => list_node(p),
+ NodeKind::EnumNumbering(_) if *at_start => enum_node(p),
- // Hashtag + keyword / identifier.
- NodeKind::Ident(_)
- | NodeKind::Let
- | NodeKind::If
- | NodeKind::While
- | NodeKind::For
- | NodeKind::Import
- | NodeKind::Include => {
- let stmt = matches!(token, NodeKind::Let | NodeKind::Import);
- let group = if stmt { Group::Stmt } else { Group::Expr };
-
- p.start_group(group, TokenMode::Code);
- expr_with(p, true, 0);
- if stmt && p.success() && !p.eof() {
- p.expected_at("semicolon or line break");
- }
- p.end_group();
- }
+ // Line-based markup that is not currently at the start of the line.
+ NodeKind::Eq | NodeKind::ListBullet | NodeKind::EnumNumbering(_) => {
+ p.convert(NodeKind::Text(p.peek_src().into()))
+ }
- // Block and template.
- NodeKind::LeftBrace => {
- block(p);
- }
- NodeKind::LeftBracket => {
- template(p);
+ // Hashtag + keyword / identifier.
+ NodeKind::Ident(_)
+ | NodeKind::Let
+ | NodeKind::If
+ | NodeKind::While
+ | NodeKind::For
+ | NodeKind::Import
+ | NodeKind::Include => {
+ let stmt = matches!(token, NodeKind::Let | NodeKind::Import);
+ let group = if stmt { Group::Stmt } else { Group::Expr };
+
+ p.start_group(group, TokenMode::Code);
+ expr_with(p, true, 0);
+ if stmt && p.success() && !p.eof() {
+ p.expected_at("semicolon or line break");
}
+ p.end_group();
+ }
- // Comments.
- NodeKind::LineComment | NodeKind::BlockComment => {
- p.eat();
- }
+ // Block and template.
+ NodeKind::LeftBrace => block(p),
+ NodeKind::LeftBracket => template(p),
- _ => {
- *at_start = false;
- p.unexpected();
- }
- };
- }
+ // Comments.
+ NodeKind::LineComment | NodeKind::BlockComment => p.eat(),
+
+ _ => {
+ *at_start = false;
+ p.unexpected();
+ }
+ };
}
/// Parse a heading.
fn heading(p: &mut Parser) {
p.start();
p.start();
- p.eat_assert(NodeKind::Eq);
+ p.eat_assert(&NodeKind::Eq);
// Count depth.
let mut level: usize = 1;
- while p.eat_if(NodeKind::Eq) {
+ while p.eat_if(&NodeKind::Eq) {
level += 1;
}
@@ -200,7 +193,7 @@ fn heading(p: &mut Parser) {
/// Parse a single list item.
fn list_node(p: &mut Parser) {
p.start();
- p.eat_assert(NodeKind::ListBullet);
+ p.eat_assert(&NodeKind::ListBullet);
let column = p.column(p.prev_end());
markup_indented(p, column);
p.end(NodeKind::List);
@@ -209,9 +202,7 @@ fn list_node(p: &mut Parser) {
/// Parse a single enum item.
fn enum_node(p: &mut Parser) {
p.start();
- if !matches!(p.eat(), Some(NodeKind::EnumNumbering(_))) {
- panic!("enum item does not start with numbering")
- };
+ p.eat();
let column = p.column(p.prev_end());
markup_indented(p, column);
p.end(NodeKind::Enum);
@@ -263,7 +254,7 @@ fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) {
continue;
}
- if p.peek() == Some(NodeKind::With) {
+ if p.peek() == Some(&NodeKind::With) {
with_expr(p, p.child_count() - offset);
if p.may_lift_abort() {
@@ -276,7 +267,7 @@ fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) {
break;
}
- let op = match p.peek().as_ref().and_then(BinOp::from_token) {
+ let op = match p.peek().and_then(BinOp::from_token) {
Some(binop) => binop,
None => {
p.lift();
@@ -286,10 +277,8 @@ fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) {
let mut prec = op.precedence();
if prec < min_prec {
- {
- p.lift();
- break;
- };
+ p.lift();
+ break;
}
p.eat();
@@ -324,7 +313,7 @@ fn primary(p: &mut Parser, atomic: bool) {
p.eat();
// Arrow means this is a closure's lone parameter.
- if !atomic && p.peek() == Some(NodeKind::Arrow) {
+ if !atomic && p.peek() == Some(&NodeKind::Arrow) {
p.end_and_start_with(NodeKind::ClosureParams);
p.eat();
@@ -359,10 +348,9 @@ fn primary(p: &mut Parser, atomic: bool) {
/// Parse a literal.
fn literal(p: &mut Parser) -> bool {
- let peeked = if let Some(p) = p.peek() {
- p
- } else {
- return false;
+ let peeked = match p.peek() {
+ Some(x) => x.clone(),
+ None => return false,
};
match peeked {
@@ -375,18 +363,14 @@ fn literal(p: &mut Parser) -> bool {
| NodeKind::Fraction(_)
| NodeKind::Length(_, _)
| NodeKind::Angle(_, _)
- | NodeKind::Percentage(_) => {
- p.eat();
- }
+ | NodeKind::Percentage(_) => p.eat(),
NodeKind::Str(s) => {
p.eat();
if !s.terminated {
p.expected_at("quote");
}
}
- _ => {
- return false;
- }
+ _ => return false,
}
true
@@ -401,7 +385,7 @@ fn parenthesized(p: &mut Parser) {
let offset = p.child_count();
p.start();
p.start_group(Group::Paren, TokenMode::Code);
- let colon = p.eat_if(NodeKind::Colon);
+ let colon = p.eat_if(&NodeKind::Colon);
let kind = collection(p).0;
p.end_group();
let token_count = p.child_count() - offset;
@@ -414,12 +398,12 @@ fn parenthesized(p: &mut Parser) {
}
// Arrow means this is a closure's parameter list.
- if p.peek() == Some(NodeKind::Arrow) {
+ if p.peek() == Some(&NodeKind::Arrow) {
p.start_with(token_count);
params(p, 0, true);
p.end(NodeKind::ClosureParams);
- p.eat_assert(NodeKind::Arrow);
+ p.eat_assert(&NodeKind::Arrow);
expr(p);
@@ -485,7 +469,7 @@ fn collection(p: &mut Parser) -> (CollectionKind, usize) {
break;
}
- if p.eat_if(NodeKind::Comma) {
+ if p.eat_if(&NodeKind::Comma) {
has_comma = true;
} else {
missing_coma = Some(p.child_count());
@@ -518,7 +502,7 @@ enum CollectionItemKind {
/// Parse an expression or a named pair. Returns if this is a named pair.
fn item(p: &mut Parser) -> CollectionItemKind {
p.start();
- if p.eat_if(NodeKind::Dots) {
+ if p.eat_if(&NodeKind::Dots) {
expr(p);
p.end_or_abort(NodeKind::ParameterSink);
@@ -531,7 +515,7 @@ fn item(p: &mut Parser) -> CollectionItemKind {
return CollectionItemKind::Unnamed;
}
- if p.eat_if(NodeKind::Colon) {
+ if p.eat_if(&NodeKind::Colon) {
let child = p.child(1).unwrap();
if matches!(child.kind(), &NodeKind::Ident(_)) {
expr(p);
@@ -686,9 +670,9 @@ fn args(p: &mut Parser, allow_template: bool) {
/// Parse a with expression.
fn with_expr(p: &mut Parser, preserve: usize) {
p.start_with(preserve);
- p.eat_assert(NodeKind::With);
+ p.eat_assert(&NodeKind::With);
- if p.peek() == Some(NodeKind::LeftParen) {
+ if p.peek() == Some(&NodeKind::LeftParen) {
args(p, false);
p.end(NodeKind::WithExpr);
} else {
@@ -700,7 +684,7 @@ fn with_expr(p: &mut Parser, preserve: usize) {
/// Parse a let expression.
fn let_expr(p: &mut Parser) {
p.start();
- p.eat_assert(NodeKind::Let);
+ p.eat_assert(&NodeKind::Let);
let offset = p.child_count();
ident(p);
@@ -708,7 +692,7 @@ fn let_expr(p: &mut Parser) {
return;
}
- if p.peek() == Some(NodeKind::With) {
+ if p.peek() == Some(&NodeKind::With) {
with_expr(p, p.child_count() - offset);
} else {
// If a parenthesis follows, this is a function definition.
@@ -725,7 +709,7 @@ fn let_expr(p: &mut Parser) {
false
};
- if p.eat_if(NodeKind::Eq) {
+ if p.eat_if(&NodeKind::Eq) {
expr(p);
} else if has_params {
// Function definitions must have a body.
@@ -749,7 +733,7 @@ fn let_expr(p: &mut Parser) {
/// Parse an if expression.
fn if_expr(p: &mut Parser) {
p.start();
- p.eat_assert(NodeKind::If);
+ p.eat_assert(&NodeKind::If);
expr(p);
if p.may_end_abort(NodeKind::IfExpr) {
@@ -762,8 +746,8 @@ fn if_expr(p: &mut Parser) {
return;
}
- if p.eat_if(NodeKind::Else) {
- if p.peek() == Some(NodeKind::If) {
+ if p.eat_if(&NodeKind::Else) {
+ if p.peek() == Some(&NodeKind::If) {
if_expr(p);
} else {
body(p);
@@ -776,7 +760,7 @@ fn if_expr(p: &mut Parser) {
/// Parse a while expression.
fn while_expr(p: &mut Parser) {
p.start();
- p.eat_assert(NodeKind::While);
+ p.eat_assert(&NodeKind::While);
expr(p);
@@ -793,7 +777,7 @@ fn while_expr(p: &mut Parser) {
/// Parse a for expression.
fn for_expr(p: &mut Parser) {
p.start();
- p.eat_assert(NodeKind::For);
+ p.eat_assert(&NodeKind::For);
for_pattern(p);
@@ -801,7 +785,7 @@ fn for_expr(p: &mut Parser) {
return;
}
- if p.eat_expect(NodeKind::In) {
+ if p.eat_expect(&NodeKind::In) {
expr(p);
if p.may_end_abort(NodeKind::ForExpr) {
@@ -828,7 +812,7 @@ fn for_pattern(p: &mut Parser) {
return;
}
- if p.peek() == Some(NodeKind::Comma) {
+ if p.peek() == Some(&NodeKind::Comma) {
p.eat();
ident(p);
@@ -844,9 +828,9 @@ fn for_pattern(p: &mut Parser) {
/// Parse an import expression.
fn import_expr(p: &mut Parser) {
p.start();
- p.eat_assert(NodeKind::Import);
+ p.eat_assert(&NodeKind::Import);
- if !p.eat_if(NodeKind::Star) {
+ if !p.eat_if(&NodeKind::Star) {
// This is the list of identifiers scenario.
p.start();
p.start_group(Group::Imports, TokenMode::Code);
@@ -865,7 +849,7 @@ fn import_expr(p: &mut Parser) {
p.end(NodeKind::ImportItems);
};
- if p.eat_expect(NodeKind::From) {
+ if p.eat_expect(&NodeKind::From) {
expr(p);
}
@@ -875,7 +859,7 @@ fn import_expr(p: &mut Parser) {
/// Parse an include expression.
fn include_expr(p: &mut Parser) {
p.start();
- p.eat_assert(NodeKind::Include);
+ p.eat_assert(&NodeKind::Include);
expr(p);
p.end(NodeKind::IncludeExpr);
@@ -883,11 +867,12 @@ fn include_expr(p: &mut Parser) {
/// Parse an identifier.
fn ident(p: &mut Parser) {
- if let Some(NodeKind::Ident(_)) = p.peek() {
- p.eat();
- } else {
- p.expected("identifier");
- p.unsuccessful();
+ match p.peek() {
+ Some(NodeKind::Ident(_)) => p.eat(),
+ _ => {
+ p.expected("identifier");
+ p.unsuccessful();
+ }
}
}
diff --git a/src/parse/parser.rs b/src/parse/parser.rs
index f62e882a..e6fcc1ae 100644
--- a/src/parse/parser.rs
+++ b/src/parse/parser.rs
@@ -161,7 +161,7 @@ impl<'s> Parser<'s> {
let len = children.iter().map(|c| c.len()).sum();
self.children
- .push(GreenNode::with_children(kind, len, children.into_iter()).into());
+ .push(GreenNode::with_children(kind, len, children).into());
self.children.extend(remains);
self.success = true;
}
@@ -240,10 +240,9 @@ impl<'s> Parser<'s> {
}
pub fn finish(&mut self) -> Rc<GreenNode> {
- if let Green::Node(n) = self.children.pop().unwrap() {
- n
- } else {
- panic!()
+ match self.children.pop().unwrap() {
+ Green::Node(n) => n,
+ _ => panic!(),
}
}
@@ -252,16 +251,16 @@ impl<'s> Parser<'s> {
self.peek().is_none()
}
- pub fn eat(&mut self) -> Option<NodeKind> {
- let token = self.peek()?;
- self.bump();
+ fn eat_peeked(&mut self) -> Option<NodeKind> {
+ let token = self.peek()?.clone();
+ self.eat();
Some(token)
}
/// Consume the next token if it is the given one.
- pub fn eat_if(&mut self, t: NodeKind) -> bool {
+ pub fn eat_if(&mut self, t: &NodeKind) -> bool {
if self.peek() == Some(t) {
- self.bump();
+ self.eat();
true
} else {
false
@@ -271,36 +270,36 @@ impl<'s> Parser<'s> {
/// Consume the next token if the closure maps it a to `Some`-variant.
pub fn eat_map<T, F>(&mut self, f: F) -> Option<T>
where
- F: FnOnce(NodeKind) -> Option<T>,
+ F: FnOnce(&NodeKind) -> Option<T>,
{
let token = self.peek()?;
let mapped = f(token);
if mapped.is_some() {
- self.bump();
+ self.eat();
}
mapped
}
/// Consume the next token if it is the given one and produce an error if
/// not.
- pub fn eat_expect(&mut self, t: NodeKind) -> bool {
- let eaten = self.eat_if(t.clone());
+ pub fn eat_expect(&mut self, t: &NodeKind) -> bool {
+ let eaten = self.eat_if(t);
if !eaten {
- self.expected_at(&t.to_string());
+ self.expected_at(t.as_str());
}
eaten
}
/// Consume the next token, debug-asserting that it is one of the given ones.
- pub fn eat_assert(&mut self, t: NodeKind) {
- let next = self.eat();
- debug_assert_eq!(next, Some(t));
+ pub fn eat_assert(&mut self, t: &NodeKind) {
+ let next = self.eat_peeked();
+ debug_assert_eq!(next.as_ref(), Some(t));
}
/// Consume tokens while the condition is true.
pub fn eat_while<F>(&mut self, mut f: F)
where
- F: FnMut(NodeKind) -> bool,
+ F: FnMut(&NodeKind) -> bool,
{
while self.peek().map_or(false, |t| f(t)) {
self.eat();
@@ -308,8 +307,8 @@ impl<'s> Parser<'s> {
}
/// Peek at the next token without consuming it.
- pub fn peek(&self) -> Option<NodeKind> {
- self.peeked.clone()
+ pub fn peek(&self) -> Option<&NodeKind> {
+ self.peeked.as_ref()
}
/// Peek at the next token if it follows immediately after the last one
@@ -371,9 +370,9 @@ impl<'s> Parser<'s> {
self.repeek();
match kind {
- Group::Paren => self.eat_assert(NodeKind::LeftParen),
- Group::Bracket => self.eat_assert(NodeKind::LeftBracket),
- Group::Brace => self.eat_assert(NodeKind::LeftBrace),
+ Group::Paren => self.eat_assert(&NodeKind::LeftParen),
+ Group::Bracket => self.eat_assert(&NodeKind::LeftBracket),
+ Group::Brace => self.eat_assert(&NodeKind::LeftBrace),
Group::Stmt => {}
Group::Expr => {}
Group::Imports => {}
@@ -402,11 +401,11 @@ impl<'s> Parser<'s> {
} {
if self.next == Some(end.clone()) {
// Bump the delimiter and return. No need to rescan in this case.
- self.bump();
+ self.eat();
rescan = false;
} else if required {
self.start();
- self.abort(format!("expected {}", end.to_string()));
+ self.abort(format!("expected {}", end));
}
}
@@ -457,21 +456,21 @@ impl<'s> Parser<'s> {
/// Eat the next token and add an error that it is not the expected `thing`.
pub fn expected(&mut self, what: &str) {
self.start();
- if let Some(found) = self.eat() {
- self.abort(format!("expected {}, found {}", what, found.to_string()))
- } else {
- self.lift();
- self.expected_at(what);
+ match self.eat_peeked() {
+ Some(found) => self.abort(format!("expected {}, found {}", what, found)),
+ None => {
+ self.lift();
+ self.expected_at(what);
+ }
}
}
/// Eat the next token and add an error that it is unexpected.
pub fn unexpected(&mut self) {
self.start();
- if let Some(found) = self.eat() {
- self.abort(format!("unexpected {}", found.to_string()))
- } else {
- self.abort("unexpected end of file")
+ match self.eat_peeked() {
+ Some(found) => self.abort(format!("unexpected {}", found)),
+ None => self.abort("unexpected end of file"),
}
}
@@ -489,7 +488,7 @@ impl<'s> Parser<'s> {
}
/// Move to the next token.
- fn bump(&mut self) {
+ pub fn eat(&mut self) {
self.children.push(
GreenData::new(
self.next.clone().unwrap(),
@@ -511,7 +510,7 @@ impl<'s> Parser<'s> {
if self.tokens.mode() == TokenMode::Code {
// Skip whitespace and comments.
while self.next.as_ref().map_or(false, |x| self.skip_type(x)) {
- self.bump();
+ self.eat();
}
}
diff --git a/src/parse/resolve.rs b/src/parse/resolve.rs
index c59c3bb1..1b3089a6 100644
--- a/src/parse/resolve.rs
+++ b/src/parse/resolve.rs
@@ -25,11 +25,9 @@ pub fn resolve_string(string: &str) -> EcoString {
let sequence = s.eat_while(|c| c.is_ascii_hexdigit());
let _terminated = s.eat_if('}');
- if let Some(c) = resolve_hex(sequence) {
- out.push(c);
- } else {
- // TODO: Feedback that unicode escape sequence is wrong.
- out.push_str(s.eaten_from(start));
+ match resolve_hex(sequence) {
+ Some(c) => out.push(c),
+ None => out.push_str(s.eaten_from(start)),
}
}
diff --git a/src/parse/tokens.rs b/src/parse/tokens.rs
index 19d0d77b..bfd9f3ed 100644
--- a/src/parse/tokens.rs
+++ b/src/parse/tokens.rs
@@ -224,8 +224,8 @@ impl<'s> Tokens<'s> {
}
fn backslash(&mut self) -> NodeKind {
- if let Some(c) = self.s.peek() {
- match c {
+ match self.s.peek() {
+ Some(c) => match c {
// Backslash and comments.
'\\' | '/' |
// Parenthesis and hashtag.
@@ -247,9 +247,8 @@ impl<'s> Tokens<'s> {
}
c if c.is_whitespace() => NodeKind::Linebreak,
_ => NodeKind::Text("\\".into()),
- }
- } else {
- NodeKind::Linebreak
+ },
+ None => NodeKind::Linebreak,
}
}
@@ -257,10 +256,9 @@ impl<'s> Tokens<'s> {
fn hash(&mut self) -> NodeKind {
if self.s.check_or(false, is_id_start) {
let read = self.s.eat_while(is_id_continue);
- if let Some(keyword) = keyword(read) {
- keyword
- } else {
- NodeKind::Ident(read.into())
+ match keyword(read) {
+ Some(keyword) => keyword,
+ None => NodeKind::Ident(read.into()),
}
} else {
NodeKind::Text("#".into())