satisfy clippy

This commit is contained in:
jusax23 2024-11-05 16:22:39 +01:00
parent 46ca5ecc50
commit 982d0767e4
Signed by: jusax23
GPG key ID: 499E2AA870C1CD41
6 changed files with 62 additions and 70 deletions

View file

@ -1,4 +1,3 @@
use ll_grammar::Skippable;
use rcompiler::prelude::*;
use regex::Match;
use std::collections::HashMap;

View file

@ -47,7 +47,7 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
&self,
none_terminal: &N,
terminal: &Option<T>,
) -> Option<(usize, &Vec<Sentential<N,T>>)> {
) -> Option<(usize, &Vec<Sentential<N, T>>)> {
assert!(
self.ll_parse_table.is_some(),
"Please call gen_parse_table before this!"
@ -102,7 +102,8 @@ impl<
// content of the vec:
// - first element: all of them combined represent the complete stack of the parser.
// - second element: rule has to be able to derive the code defined by its inner children and the unparsed code from the accompanying first element.
let mut stack: Vec<(Vec<Sentential<N,T>>, ParseTree<N, S>)> = vec![(
type StackElem<N, T, S> = (Vec<Sentential<N, T>>, ParseTree<N, S>);
let mut stack: Vec<StackElem<N, T, S>> = vec![(
vec![Sentential::NoneTerminal(self.grammar.start.clone())],
ParseTree::new(None),
)];
@ -142,10 +143,10 @@ impl<
// take next none terminal and apply rule from parse table.
Some(Sentential::NoneTerminal(none_term)) => {
// load rule
let Some((id, new_rule)) = self
.grammar
.ll_parse_table(&none_term, &next.as_ref().map(|f| f.clone().into()))
else {
let Some((id, new_rule)) = self.grammar.ll_parse_table(
&none_term,
&next.as_ref().map(|f| f.clone().into()),
) else {
// no rule
return Err(format!(
"Unexpected token: {}",
@ -155,8 +156,7 @@ impl<
};
// reverse rule: the vec is used as a stack, so the rule must be pushed in reverse order
let new_rule_rev =
new_rule.iter().rev().map(|f| f.clone()).collect::<Vec<_>>();
let new_rule_rev = new_rule.iter().rev().cloned().collect::<Vec<_>>();
// memorize current state/rule for later
stack.push(state.unwrap());
// process next rule
@ -177,7 +177,7 @@ impl<
return Ok(state.unwrap().1);
}
// still code left, but not expected
return Err(format!("Expected end of file."));
return Err("Expected end of file.".to_string());
};
last.1.childs.push(NodeChild::Child(state.unwrap().1));
}
@ -185,7 +185,7 @@ impl<
}
// should not be possible, because every other path either pushes back onto the stack or returns
None => {
return Err(format!("Err: EOS"));
return Err("Err: EOS".to_string());
}
}
}

View file

@ -6,8 +6,13 @@ use std::{
use super::{Grammar, Sentential};
pub type RL0Automaton<N, T> =
HashMap<Rc<LR0State<N, T>>, Vec<(Sentential<N, T>, Weak<LR0State<N, T>>)>>;
pub type RL0Rule<N, T> = (N, Vec<Sentential<N, T>>, usize);
#[derive(Debug, Eq, PartialEq)]
pub struct LR0State<N: Hash + Eq, T: Hash + Eq>(HashSet<(N, Vec<Sentential<N, T>>, usize)>);
pub struct LR0State<N: Hash + Eq, T: Hash + Eq>(HashSet<RL0Rule<N, T>>);
impl<N: Hash + Eq + Clone, T: Hash + Eq + Clone> LR0State<N, T> {
pub fn next_kernel(&self, read: &Sentential<N, T>) -> Self {
@ -31,7 +36,7 @@ impl<N: Hash + Eq + Clone, T: Hash + Eq + Clone> LR0State<N, T> {
}
impl<N: Hash + Eq + Ord, T: Hash + Eq + Ord> Hash for LR0State<N, T> {
fn hash<H: Hasher>(&self, state: &mut H) {
let mut a: Vec<&(N, Vec<Sentential<N, T>>, usize)> = self.0.iter().collect();
let mut a: Vec<&RL0Rule<N, T>> = self.0.iter().collect();
a.sort();
for s in a.iter() {
s.hash(state);
@ -69,8 +74,7 @@ where
}
pub fn gen_lr0_automaton(&mut self) {
let mut out: HashMap<Rc<LR0State<N, T>>, Vec<(Sentential<N, T>, Weak<LR0State<N, T>>)>> =
HashMap::new();
let mut out: RL0Automaton<N, T> = HashMap::new();
let mut start_state = LR0State(HashSet::new());
if let Some(rule) = self.rules.get(&self.start) {
for to in rule {

View file

@ -1,10 +1,9 @@
use std::{
collections::{HashMap, HashSet},
hash::Hash,
rc::{Rc, Weak},
};
use lr0::LR0State;
use lr0::RL0Automaton;
pub mod ll_grammar;
pub mod lr0;
@ -83,8 +82,7 @@ pub struct Grammar<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash +
/// Graph, defined through this adjacency list.
/// - key: states
/// - value: list with read symbol and linked node.
pub lr0_automaton:
Option<HashMap<Rc<LR0State<N, T>>, Vec<(Sentential<N, T>, Weak<LR0State<N, T>>)>>>,
pub lr0_automaton: Option<RL0Automaton<N, T>>,
}
impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar<N, T> {
@ -93,7 +91,7 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
Sentential::Terminal(_) => false,
Sentential::NoneTerminal(nt) => self
.rules
.get(&nt)
.get(nt)
.map(|f| f.iter().any(|v| v.is_empty()))
.unwrap_or(false),
}
@ -189,7 +187,7 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
}
pub fn gen_follow(&mut self) {
if self.first == None {
if self.first.is_none() {
self.gen_first();
}
let mut follow: HashMap<N, HashSet<Option<T>>> = HashMap::new();
@ -204,47 +202,25 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
// and if A -> aBb and e in First(b) add Follow(A) to Follow(B)
if to.len() >= 2 {
for i in 0..(to.len() - 1) {
let slice = to[i + 1..].iter().map(|f| f.clone()).collect::<Vec<_>>();
match to.get(i) {
Some(Sentential::NoneTerminal(b)) => {
let mut set = self.first(&slice);
if set.contains(&None) {
if let Some(set) = follow.get(from).cloned() {
follow
.entry(b.clone())
.and_modify(|e| {
for val in set.iter() {
change |= e.insert(val.clone());
}
})
.or_insert_with(|| {
change = true;
set
});
}
let slice = to[i + 1..].to_vec();
if let Some(Sentential::NoneTerminal(b)) = to.get(i) {
let mut set = self.first(&slice);
if set.contains(&None) {
if let Some(set) = follow.get(from).cloned() {
follow
.entry(b.clone())
.and_modify(|e| {
for val in set.iter() {
change |= e.insert(val.clone());
}
})
.or_insert_with(|| {
change = true;
set
});
}
set.remove(&None);
follow
.entry(b.clone())
.and_modify(|e| {
for val in set.iter() {
change |= e.insert(val.clone());
}
})
.or_insert_with(|| {
change = true;
set
});
}
_ => (),
}
}
}
// b
// and if A -> aB add Follow(A) to Follow(B)
match to.last() {
Some(Sentential::NoneTerminal(b)) => {
if let Some(set) = follow.get(from).cloned() {
set.remove(&None);
follow
.entry(b.clone())
.and_modify(|e| {
@ -258,7 +234,23 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
});
}
}
_ => (),
}
// b
// and if A -> aB add Follow(A) to Follow(B)
if let Some(Sentential::NoneTerminal(b)) = to.last() {
if let Some(set) = follow.get(from).cloned() {
follow
.entry(b.clone())
.and_modify(|e| {
for val in set.iter() {
change |= e.insert(val.clone());
}
})
.or_insert_with(|| {
change = true;
set
});
}
}
}
}
@ -275,7 +267,7 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
self.follow
.as_ref()
.unwrap()
.get(&none_termianl)
.get(none_termianl)
.cloned()
.unwrap_or(HashSet::new())
}

View file

@ -94,13 +94,13 @@ token_scanner!(
Some(Dot)
}
r"^[0-9]+.[0-9]*" : |_, m: Match<'_>| {
m.as_str().parse::<_>().ok().map(|f| Float(f))
m.as_str().parse::<_>().ok().map(Float)
}
r#"^"(([^"\\]|(\\[a-z\\"]))*)""# : |capture: regex::Captures<'_>, _| {
capture.get(1).map(|m| Str(m.as_str().to_string()))
}
r"^[0-9]+" : |_, m: Match<'_>| {
m.as_str().parse::<_>().ok().map(|i| Int(i))
m.as_str().parse::<_>().ok().map(Int)
}
);
@ -124,10 +124,7 @@ enum NoneTerminals {
impl Skippable for NoneTerminals {
fn skippable(&self) -> bool {
use NoneTerminals::*;
match self {
P | Li | Ei | Si | Ti | Fi | IF | Sem => true,
_ => false,
}
matches!(self, P | Li | Ei | Si | Ti | Fi | IF | Sem)
}
}

View file

@ -6,7 +6,7 @@ macro_rules! token_scanner {
$regex:tt : $code:expr
)*) => {
impl $crate::scanner::MatchNext<$name> for $name {
fn match_next(code: &String) -> Option<(Self, usize)> {
fn match_next(code: &str) -> Option<(Self, usize)> {
use $name::*;
use regex::Regex;
$(
@ -25,7 +25,7 @@ macro_rules! token_scanner {
}
pub trait MatchNext<T> {
fn match_next(code: &String) -> Option<(T, usize)>;
fn match_next(code: &str) -> Option<(T, usize)>;
}
pub struct Scanner<T: MatchNext<T> + PartialEq> {