satisfy clippy
This commit is contained in:
parent
46ca5ecc50
commit
982d0767e4
6 changed files with 62 additions and 70 deletions
|
@ -1,4 +1,3 @@
|
||||||
use ll_grammar::Skippable;
|
|
||||||
use rcompiler::prelude::*;
|
use rcompiler::prelude::*;
|
||||||
use regex::Match;
|
use regex::Match;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
|
@ -102,7 +102,8 @@ impl<
|
||||||
// content of the vec:
|
// content of the vec:
|
||||||
// - first element: all of them combined represent the complete stack of the parser.
|
// - first element: all of them combined represent the complete stack of the parser.
|
||||||
// - second element: rule has to be able to derive the code defined by its inner children and the unparsed code from the accompanying first element.
|
// - second element: rule has to be able to derive the code defined by its inner children and the unparsed code from the accompanying first element.
|
||||||
let mut stack: Vec<(Vec<Sentential<N,T>>, ParseTree<N, S>)> = vec![(
|
type StackElem<N, T, S> = (Vec<Sentential<N, T>>, ParseTree<N, S>);
|
||||||
|
let mut stack: Vec<StackElem<N, T, S>> = vec![(
|
||||||
vec![Sentential::NoneTerminal(self.grammar.start.clone())],
|
vec![Sentential::NoneTerminal(self.grammar.start.clone())],
|
||||||
ParseTree::new(None),
|
ParseTree::new(None),
|
||||||
)];
|
)];
|
||||||
|
@ -142,10 +143,10 @@ impl<
|
||||||
// take next none terminal and apply rule from parse table.
|
// take next none terminal and apply rule from parse table.
|
||||||
Some(Sentential::NoneTerminal(none_term)) => {
|
Some(Sentential::NoneTerminal(none_term)) => {
|
||||||
// load rule
|
// load rule
|
||||||
let Some((id, new_rule)) = self
|
let Some((id, new_rule)) = self.grammar.ll_parse_table(
|
||||||
.grammar
|
&none_term,
|
||||||
.ll_parse_table(&none_term, &next.as_ref().map(|f| f.clone().into()))
|
&next.as_ref().map(|f| f.clone().into()),
|
||||||
else {
|
) else {
|
||||||
// no rule
|
// no rule
|
||||||
return Err(format!(
|
return Err(format!(
|
||||||
"Unexpected token: {}",
|
"Unexpected token: {}",
|
||||||
|
@ -155,8 +156,7 @@ impl<
|
||||||
};
|
};
|
||||||
|
|
||||||
// reverse rule: because, uses vec as stack, but reversed
|
// reverse rule: because, uses vec as stack, but reversed
|
||||||
let new_rule_rev =
|
let new_rule_rev = new_rule.iter().rev().cloned().collect::<Vec<_>>();
|
||||||
new_rule.iter().rev().map(|f| f.clone()).collect::<Vec<_>>();
|
|
||||||
// memorize current state/rule for later
|
// memorize current state/rule for later
|
||||||
stack.push(state.unwrap());
|
stack.push(state.unwrap());
|
||||||
// process next rule
|
// process next rule
|
||||||
|
@ -177,7 +177,7 @@ impl<
|
||||||
return Ok(state.unwrap().1);
|
return Ok(state.unwrap().1);
|
||||||
}
|
}
|
||||||
// still code left, but not expected
|
// still code left, but not expected
|
||||||
return Err(format!("Expected end of file."));
|
return Err("Expected end of file.".to_string());
|
||||||
};
|
};
|
||||||
last.1.childs.push(NodeChild::Child(state.unwrap().1));
|
last.1.childs.push(NodeChild::Child(state.unwrap().1));
|
||||||
}
|
}
|
||||||
|
@ -185,7 +185,7 @@ impl<
|
||||||
}
|
}
|
||||||
// should not be possible, because every other path pushes back onto the stack or returns
|
// should not be possible, because every other path pushes back onto the stack or returns
|
||||||
None => {
|
None => {
|
||||||
return Err(format!("Err: EOS"));
|
return Err("Err: EOS".to_string());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,8 +6,13 @@ use std::{
|
||||||
|
|
||||||
use super::{Grammar, Sentential};
|
use super::{Grammar, Sentential};
|
||||||
|
|
||||||
|
pub type RL0Automaton<N, T> =
|
||||||
|
HashMap<Rc<LR0State<N, T>>, Vec<(Sentential<N, T>, Weak<LR0State<N, T>>)>>;
|
||||||
|
|
||||||
|
pub type RL0Rule<N, T> = (N, Vec<Sentential<N, T>>, usize);
|
||||||
|
|
||||||
#[derive(Debug, Eq, PartialEq)]
|
#[derive(Debug, Eq, PartialEq)]
|
||||||
pub struct LR0State<N: Hash + Eq, T: Hash + Eq>(HashSet<(N, Vec<Sentential<N, T>>, usize)>);
|
pub struct LR0State<N: Hash + Eq, T: Hash + Eq>(HashSet<RL0Rule<N, T>>);
|
||||||
|
|
||||||
impl<N: Hash + Eq + Clone, T: Hash + Eq + Clone> LR0State<N, T> {
|
impl<N: Hash + Eq + Clone, T: Hash + Eq + Clone> LR0State<N, T> {
|
||||||
pub fn next_kernel(&self, read: &Sentential<N, T>) -> Self {
|
pub fn next_kernel(&self, read: &Sentential<N, T>) -> Self {
|
||||||
|
@ -31,7 +36,7 @@ impl<N: Hash + Eq + Clone, T: Hash + Eq + Clone> LR0State<N, T> {
|
||||||
}
|
}
|
||||||
impl<N: Hash + Eq + Ord, T: Hash + Eq + Ord> Hash for LR0State<N, T> {
|
impl<N: Hash + Eq + Ord, T: Hash + Eq + Ord> Hash for LR0State<N, T> {
|
||||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||||
let mut a: Vec<&(N, Vec<Sentential<N, T>>, usize)> = self.0.iter().collect();
|
let mut a: Vec<&RL0Rule<N, T>> = self.0.iter().collect();
|
||||||
a.sort();
|
a.sort();
|
||||||
for s in a.iter() {
|
for s in a.iter() {
|
||||||
s.hash(state);
|
s.hash(state);
|
||||||
|
@ -69,8 +74,7 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn gen_lr0_automaton(&mut self) {
|
pub fn gen_lr0_automaton(&mut self) {
|
||||||
let mut out: HashMap<Rc<LR0State<N, T>>, Vec<(Sentential<N, T>, Weak<LR0State<N, T>>)>> =
|
let mut out: RL0Automaton<N, T> = HashMap::new();
|
||||||
HashMap::new();
|
|
||||||
let mut start_state = LR0State(HashSet::new());
|
let mut start_state = LR0State(HashSet::new());
|
||||||
if let Some(rule) = self.rules.get(&self.start) {
|
if let Some(rule) = self.rules.get(&self.start) {
|
||||||
for to in rule {
|
for to in rule {
|
||||||
|
|
|
@ -1,10 +1,9 @@
|
||||||
use std::{
|
use std::{
|
||||||
collections::{HashMap, HashSet},
|
collections::{HashMap, HashSet},
|
||||||
hash::Hash,
|
hash::Hash,
|
||||||
rc::{Rc, Weak},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
use lr0::LR0State;
|
use lr0::RL0Automaton;
|
||||||
|
|
||||||
pub mod ll_grammar;
|
pub mod ll_grammar;
|
||||||
pub mod lr0;
|
pub mod lr0;
|
||||||
|
@ -83,8 +82,7 @@ pub struct Grammar<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash +
|
||||||
/// Graph, defined through this adjacency list.
|
/// Graph, defined through this adjacency list.
|
||||||
/// - key: states
|
/// - key: states
|
||||||
/// - value: list with read symbol and linked node.
|
/// - value: list with read symbol and linked node.
|
||||||
pub lr0_automaton:
|
pub lr0_automaton: Option<RL0Automaton<N, T>>,
|
||||||
Option<HashMap<Rc<LR0State<N, T>>, Vec<(Sentential<N, T>, Weak<LR0State<N, T>>)>>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar<N, T> {
|
impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar<N, T> {
|
||||||
|
@ -93,7 +91,7 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
|
||||||
Sentential::Terminal(_) => false,
|
Sentential::Terminal(_) => false,
|
||||||
Sentential::NoneTerminal(nt) => self
|
Sentential::NoneTerminal(nt) => self
|
||||||
.rules
|
.rules
|
||||||
.get(&nt)
|
.get(nt)
|
||||||
.map(|f| f.iter().any(|v| v.is_empty()))
|
.map(|f| f.iter().any(|v| v.is_empty()))
|
||||||
.unwrap_or(false),
|
.unwrap_or(false),
|
||||||
}
|
}
|
||||||
|
@ -189,7 +187,7 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn gen_follow(&mut self) {
|
pub fn gen_follow(&mut self) {
|
||||||
if self.first == None {
|
if self.first.is_none() {
|
||||||
self.gen_first();
|
self.gen_first();
|
||||||
}
|
}
|
||||||
let mut follow: HashMap<N, HashSet<Option<T>>> = HashMap::new();
|
let mut follow: HashMap<N, HashSet<Option<T>>> = HashMap::new();
|
||||||
|
@ -204,9 +202,8 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
|
||||||
// and if A -> aBb and e in First(b) add Follow(A) to Follow(B)
|
// and if A -> aBb and e in First(b) add Follow(A) to Follow(B)
|
||||||
if to.len() >= 2 {
|
if to.len() >= 2 {
|
||||||
for i in 0..(to.len() - 1) {
|
for i in 0..(to.len() - 1) {
|
||||||
let slice = to[i + 1..].iter().map(|f| f.clone()).collect::<Vec<_>>();
|
let slice = to[i + 1..].to_vec();
|
||||||
match to.get(i) {
|
if let Some(Sentential::NoneTerminal(b)) = to.get(i) {
|
||||||
Some(Sentential::NoneTerminal(b)) => {
|
|
||||||
let mut set = self.first(&slice);
|
let mut set = self.first(&slice);
|
||||||
if set.contains(&None) {
|
if set.contains(&None) {
|
||||||
if let Some(set) = follow.get(from).cloned() {
|
if let Some(set) = follow.get(from).cloned() {
|
||||||
|
@ -236,14 +233,11 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
|
||||||
set
|
set
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// b
|
// b
|
||||||
// and if A -> aB add Follow(A) to Follow(B)
|
// and if A -> aB add Follow(A) to Follow(B)
|
||||||
match to.last() {
|
if let Some(Sentential::NoneTerminal(b)) = to.last() {
|
||||||
Some(Sentential::NoneTerminal(b)) => {
|
|
||||||
if let Some(set) = follow.get(from).cloned() {
|
if let Some(set) = follow.get(from).cloned() {
|
||||||
follow
|
follow
|
||||||
.entry(b.clone())
|
.entry(b.clone())
|
||||||
|
@ -258,8 +252,6 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -275,7 +267,7 @@ impl<N: PartialEq + Eq + Hash + Clone, T: PartialEq + Eq + Hash + Clone> Grammar
|
||||||
self.follow
|
self.follow
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.get(&none_termianl)
|
.get(none_termianl)
|
||||||
.cloned()
|
.cloned()
|
||||||
.unwrap_or(HashSet::new())
|
.unwrap_or(HashSet::new())
|
||||||
}
|
}
|
||||||
|
|
|
@ -94,13 +94,13 @@ token_scanner!(
|
||||||
Some(Dot)
|
Some(Dot)
|
||||||
}
|
}
|
||||||
r"^[0-9]+.[0-9]*" : |_, m: Match<'_>| {
|
r"^[0-9]+.[0-9]*" : |_, m: Match<'_>| {
|
||||||
m.as_str().parse::<_>().ok().map(|f| Float(f))
|
m.as_str().parse::<_>().ok().map(Float)
|
||||||
}
|
}
|
||||||
r#"^"(([^"\\]|(\\[a-z\\"]))*)""# : |capture: regex::Captures<'_>, _| {
|
r#"^"(([^"\\]|(\\[a-z\\"]))*)""# : |capture: regex::Captures<'_>, _| {
|
||||||
capture.get(1).map(|m| Str(m.as_str().to_string()))
|
capture.get(1).map(|m| Str(m.as_str().to_string()))
|
||||||
}
|
}
|
||||||
r"^[0-9]+" : |_, m: Match<'_>| {
|
r"^[0-9]+" : |_, m: Match<'_>| {
|
||||||
m.as_str().parse::<_>().ok().map(|i| Int(i))
|
m.as_str().parse::<_>().ok().map(Int)
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -124,10 +124,7 @@ enum NoneTerminals {
|
||||||
impl Skippable for NoneTerminals {
|
impl Skippable for NoneTerminals {
|
||||||
fn skippable(&self) -> bool {
|
fn skippable(&self) -> bool {
|
||||||
use NoneTerminals::*;
|
use NoneTerminals::*;
|
||||||
match self {
|
matches!(self, P | Li | Ei | Si | Ti | Fi | IF | Sem)
|
||||||
P | Li | Ei | Si | Ti | Fi | IF | Sem => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -6,7 +6,7 @@ macro_rules! token_scanner {
|
||||||
$regex:tt : $code:expr
|
$regex:tt : $code:expr
|
||||||
)*) => {
|
)*) => {
|
||||||
impl $crate::scanner::MatchNext<$name> for $name {
|
impl $crate::scanner::MatchNext<$name> for $name {
|
||||||
fn match_next(code: &String) -> Option<(Self, usize)> {
|
fn match_next(code: &str) -> Option<(Self, usize)> {
|
||||||
use $name::*;
|
use $name::*;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
$(
|
$(
|
||||||
|
@ -25,7 +25,7 @@ macro_rules! token_scanner {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait MatchNext<T> {
|
pub trait MatchNext<T> {
|
||||||
fn match_next(code: &String) -> Option<(T, usize)>;
|
fn match_next(code: &str) -> Option<(T, usize)>;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Scanner<T: MatchNext<T> + PartialEq> {
|
pub struct Scanner<T: MatchNext<T> + PartialEq> {
|
||||||
|
|
Loading…
Reference in a new issue