
Move parser util functions into separate module

Vili Sinervä 2025-01-29 16:34:18 +02:00
parent 101437d9fe
commit 4c085e89d4
No known key found for this signature in database
GPG key ID: DF8FEAF54EFAC996


@@ -1,5 +1,6 @@
 use crate::compiler::{
     ast::Expression::{self, *},
+    parser::parser_utilities::*,
     token::{Token, TokenType},
 };
@@ -131,19 +132,22 @@ fn parse_identifier<'source>(pos: &mut usize, tokens: &[Token<'source>]) -> Expr
     Identifier(token.text)
 }
-fn consume_string<'source>(
+mod parser_utilities {
+    use super::*;
+    pub fn consume_string<'source>(
     pos: &mut usize,
     tokens: &[Token<'source>],
     expected_string: &str,
 ) -> Token<'source> {
     consume_strings(pos, tokens, &[expected_string])
 }
-fn consume_strings<'source>(
+    pub fn consume_strings<'source>(
     pos: &mut usize,
     tokens: &[Token<'source>],
     strings: &[&str],
 ) -> Token<'source> {
     let token = consume(pos, tokens);
     if strings.contains(&token.text) {
@@ -154,21 +158,21 @@ fn consume_strings<'source>(
             strings, token
         );
     }
 }
-fn consume_type<'source>(
+    pub fn consume_type<'source>(
     pos: &mut usize,
     tokens: &[Token<'source>],
     expected_type: TokenType,
 ) -> Token<'source> {
     consume_types(pos, tokens, &[expected_type])
 }
-fn consume_types<'source>(
+    pub fn consume_types<'source>(
     pos: &mut usize,
     tokens: &[Token<'source>],
     types: &[TokenType],
 ) -> Token<'source> {
     let token = consume(pos, tokens);
     if types.contains(&token.token_type) {
@@ -179,15 +183,15 @@ fn consume_types<'source>(
             types, token
         );
     }
 }
-fn consume<'source>(pos: &mut usize, tokens: &[Token<'source>]) -> Token<'source> {
+    pub fn consume<'source>(pos: &mut usize, tokens: &[Token<'source>]) -> Token<'source> {
     let token = peek(pos, tokens);
     *pos += 1;
     token
 }
-fn peek<'source>(pos: &mut usize, tokens: &[Token<'source>]) -> Token<'source> {
+    pub fn peek<'source>(pos: &mut usize, tokens: &[Token<'source>]) -> Token<'source> {
     if let Some(token) = tokens.get(*pos) {
         token.clone()
     } else if let Some(last_token) = tokens.get(*pos - 1) {
@@ -195,6 +199,7 @@ fn peek<'source>(pos: &mut usize, tokens: &[Token<'source>]) -> Token<'source> {
     } else {
         panic!("Input to parser appears to be empty!");
     }
 }
+}
 #[cfg(test)]
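
For orientation, here is a minimal, self-contained sketch of how the relocated helpers behave once they live in parser_utilities and are re-exported to the parser via the new glob import. The Token/TokenType definitions and the example tokens are simplified stand-ins rather than the project's real types, and peek() below omits the original's end-of-input fallback to the last token.

// Simplified stand-ins for the project's Token<'source> / TokenType types.
#[derive(Clone, Debug)]
enum TokenType {
    Identifier,
    Punctuation,
}

#[derive(Clone, Debug)]
struct Token<'source> {
    text: &'source str,
    token_type: TokenType,
}

mod parser_utilities {
    use super::*;

    // Look at the token at `pos` without advancing the cursor.
    pub fn peek<'source>(pos: &mut usize, tokens: &[Token<'source>]) -> Token<'source> {
        tokens
            .get(*pos)
            .cloned()
            .expect("Input to parser appears to be empty!")
    }

    // Return the token at `pos` and advance the cursor past it.
    pub fn consume<'source>(pos: &mut usize, tokens: &[Token<'source>]) -> Token<'source> {
        let token = peek(pos, tokens);
        *pos += 1;
        token
    }

    // Consume a token and assert that its text is one of the expected strings.
    pub fn consume_strings<'source>(
        pos: &mut usize,
        tokens: &[Token<'source>],
        strings: &[&str],
    ) -> Token<'source> {
        let token = consume(pos, tokens);
        if strings.contains(&token.text) {
            token
        } else {
            panic!("Expected one of {:?}, found {:?}", strings, token);
        }
    }
}

fn main() {
    let tokens = vec![
        Token { text: "let", token_type: TokenType::Identifier },
        Token { text: "=", token_type: TokenType::Punctuation },
    ];
    let mut pos = 0;
    // Calling code addresses the helpers through the module path (or, as in the
    // commit, brings them into scope with a glob import).
    let keyword = parser_utilities::consume_strings(&mut pos, &tokens, &["let", "var"]);
    println!("consumed {:?}, cursor now at {}", keyword, pos);
}

With the added use crate::compiler::parser::parser_utilities::*; at the top of the file, the existing parsing functions can keep calling consume, peek, and the consume_* helpers unqualified.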