
Commit c31c51f (1 parent: f819b62)

Add Mamdani inference and aggregation modules

Introduces the Mamdani inference engine (src/mamdani.rs), rule aggregation (src/aggregate.rs), and related test coverage. Renames src/rule.rs to src/antecedent.rs, removes src/system.rs, and updates src/lib.rs to declare the new modules. Extends UniformSampler and the Sampler trait, exposes Variable.terms, and adds a domain() helper. Updates Cargo.toml to add the 'inference-mamdani' feature and enable it by default.

10 files changed (+390 −16 lines)


Cargo.toml
Lines changed: 2 additions & 1 deletion

@@ -4,7 +4,7 @@ version = "0.1.0"
 edition = "2024"
 
 [features]
-default = ["f64", "ops-dyn"]
+default = ["f64", "ops-dyn", "inference-mamdani"]
 f32 = []
 f64 = []
 serde = ["dep:serde"]
@@ -13,6 +13,7 @@ ops-minmax = []
 ops-product = []
 ops-lukasiewicz = []
 ops-dyn = [] # enables runtime enum/trait-object API
+inference-mamdani = []
 
 [profile.release]
 debug = true

src/aggregate.rs
Lines changed: 36 additions & 0 deletions

@@ -0,0 +1,36 @@
+//Aggregation across sets of rules
+
+use crate::{error::FuzzyError, mamdani::Rule, prelude::*, sampler, variable::Variable};
+use std::{borrow::Borrow, collections::HashMap, hash::Hash, result};
+
+pub fn elements_max(data: &mut Vec<Float>, src: &Vec<Float>) {
+    for (d, s) in data.iter_mut().zip(src) {
+        *d = d.max(*s)
+    }
+}
+
+pub fn aggregation<KI, KV>(
+    rules: Vec<Rule>,
+    input: &HashMap<KI, Float>,
+    vars: &HashMap<KV, Variable>,
+    sampler: UniformSampler,
+) -> Result<HashMap<String, Vec<Float>>>
+where
+    KI: Eq + Hash + Borrow<str>,
+    KV: Eq + Hash + Borrow<str>,
+{
+    let mut implicated_map: HashMap<String, Vec<Float>> = HashMap::new();
+    for i in 0..rules.len() {
+        let alpha = rules[i].activation(&input, &vars)?;
+        let implicated = rules[i].implicate(alpha, vars, &sampler)?;
+
+        for (k, v) in implicated {
+            implicated_map
+                .entry(k)
+                .and_modify(|cur| elements_max(cur, &v))
+                .or_insert(v);
+        }
+    }
+
+    return Ok(implicated_map);
+}
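Usage sketch (not part of this commit): how aggregation might be driven once variables and terms exist. It is written as it might appear inside the crate (hence the crate:: paths); the demo helper and the "temp"/"fan"/"hot"/"high" names are illustrative, and it assumes the prelude re-exports the crate's Result alias, as the module above relies on.

use std::collections::HashMap;

use crate::{
    Float,
    aggregate::aggregation,
    antecedent::Antecedent,
    mamdani::{Consequent, Rule},
    prelude::*,
    variable::Variable,
};

// Sketch only: assumes "temp" and "fan" variables with "hot"/"high" terms were
// registered in `vars` beforehand via the crate's term-insertion API (not shown).
fn demo(vars: &HashMap<&str, Variable>) -> Result<HashMap<String, Vec<Float>>> {
    // IF temp is hot THEN fan is high
    let rule = Rule {
        antecedent: Antecedent::Atom {
            var: "temp".into(),
            term: "hot".into(),
        },
        consequent: vec![Consequent {
            var: "fan".into(),
            term: "high".into(),
        }],
    };

    let mut input: HashMap<&str, Float> = HashMap::new();
    input.insert("temp", 7.5);

    // One max-aggregated output set per output variable name (e.g. "fan").
    aggregation(vec![rule], &input, vars, UniformSampler::default())
}

Because aggregation folds each rule's implicated sets with elements_max, the returned map holds one aggregated fuzzy set per output variable, ready to hand to the defuzz module.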

src/rule.rs renamed to src/antecedent.rs
Lines changed: 10 additions & 10 deletions

@@ -116,13 +116,13 @@ mod tests {
         inputs.insert("temp", 7.5);
 
         // AST: (temp is hot) AND NOT (temp is cold)
-        let ast = crate::rule::Antecedent::And(
-            Box::new(crate::rule::Antecedent::Atom {
+        let ast = crate::antecedent::Antecedent::And(
+            Box::new(crate::antecedent::Antecedent::Atom {
                 var: "temp".into(),
                 term: "hot".into(),
             }),
-            Box::new(crate::rule::Antecedent::Not(Box::new(
-                crate::rule::Antecedent::Atom {
+            Box::new(crate::antecedent::Antecedent::Not(Box::new(
+                crate::antecedent::Antecedent::Atom {
                     var: "temp".into(),
                     term: "cold".into(),
                 },
@@ -134,13 +134,13 @@ mod tests {
         let cold = Triangular::new(-10.0, -5.0, 0.0).unwrap().eval(7.5);
         let expected = hot.min(1.0 - cold);
 
-        let y = crate::rule::eval_antecedent(&ast, &inputs, &vars).unwrap();
+        let y = crate::antecedent::eval_antecedent(&ast, &inputs, &vars).unwrap();
         assert!((y - expected).abs() < eps);
     }
 
     // RED: OR behavior using the same variable at a different crisp value.
     #[test]
-    fn red_antecedent_or_behavior() {
+    fn antecedent_or_behavior() {
         // Variable setup
         let mut temp = Variable::new(-10.0, 10.0).unwrap();
         temp.insert_term(
@@ -161,12 +161,12 @@ mod tests {
         inputs.insert("temp", -5.0);
 
         // AST: (temp is cold) OR (temp is hot)
-        let ast = crate::rule::Antecedent::Or(
-            Box::new(crate::rule::Antecedent::Atom {
+        let ast = crate::antecedent::Antecedent::Or(
+            Box::new(crate::antecedent::Antecedent::Atom {
                 var: "temp".into(),
                 term: "cold".into(),
             }),
-            Box::new(crate::rule::Antecedent::Atom {
+            Box::new(crate::antecedent::Antecedent::Atom {
                 var: "temp".into(),
                 term: "hot".into(),
             }),
@@ -177,7 +177,7 @@ mod tests {
        let hot = Triangular::new(0.0, 5.0, 10.0).unwrap().eval(-5.0);
        let expected = cold.max(hot);

-       let y = crate::rule::eval_antecedent(&ast, &inputs, &vars).unwrap();
+       let y = crate::antecedent::eval_antecedent(&ast, &inputs, &vars).unwrap();
        assert!((y - expected).abs() < crate::Float::EPSILON);
    }
}
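After the rename, callers elsewhere in the crate build the AST through the antecedent module path. A tiny sketch, illustrative only and not part of the commit, mirroring the AST shape used in the tests above:

use crate::antecedent::Antecedent;

// "(temp is hot) AND NOT (temp is cold)", built against the renamed module path.
fn hot_but_not_cold() -> Antecedent {
    Antecedent::And(
        Box::new(Antecedent::Atom {
            var: "temp".into(),
            term: "hot".into(),
        }),
        Box::new(Antecedent::Not(Box::new(Antecedent::Atom {
            var: "temp".into(),
            term: "cold".into(),
        }))),
    )
}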

src/lib.rs
Lines changed: 3 additions & 2 deletions

@@ -1,13 +1,14 @@
 pub mod membership;
 
 //Temporary Module Decleration to avoid error
+pub mod aggregate;
+pub mod antecedent;
 pub mod builder;
 pub mod defuzz;
 pub mod error;
+pub mod mamdani;
 pub mod ops;
-pub mod rule;
 pub mod sampler;
-pub mod system;
 pub mod term;
 pub mod variable;
 

src/mamdani.rs
Lines changed: 86 additions & 0 deletions

@@ -0,0 +1,86 @@
+use std::{borrow::Borrow, collections::HashMap, hash::Hash};
+
+#[cfg(feature = "inference-mamdani")]
+use crate::sampler;
+use crate::{
+    antecedent::{Antecedent, eval_antecedent},
+    error::{FuzzyError, MissingSpace},
+    prelude::*,
+    sampler::UniformSampler,
+    variable::Variable,
+};
+
+pub enum Implication {
+    Clip,
+    Product,
+}
+
+pub struct Consequent {
+    pub var: String,
+    pub term: String,
+    //pub weight: Float,
+    //pub imp: Implication,
+}
+
+pub struct Rule {
+    pub antecedent: Antecedent,
+    pub consequent: Vec<Consequent>,
+}
+
+//Mamdani Inference Engine
+#[cfg(feature = "inference-mamdani")]
+impl Rule {
+    pub fn activation<KI, KV>(
+        &self,
+        input: &HashMap<KI, Float>,
+        vars: &HashMap<KV, Variable>,
+    ) -> Result<Float>
+    where
+        KI: Eq + Hash + Borrow<str>,
+        KV: Eq + Hash + Borrow<str>,
+    {
+        eval_antecedent(&self.antecedent, input, vars)
+    }
+
+    pub fn implicate<KV>(
+        &self,
+        alpha: Float,
+        vers: &HashMap<KV, Variable>,
+        sampler: &UniformSampler,
+    ) -> Result<HashMap<String, Vec<Float>>>
+    where
+        KV: Eq + Hash + Borrow<str>,
+    {
+        let mut result_map: HashMap<String, Vec<Float>> = HashMap::new();
+
+        for i in 0..self.consequent.len() {
+            let mut result_vec = vec![0.0; sampler.n];
+
+            let (dom_min, dom_max) = vers
+                .get(&self.consequent[i].var.as_str())
+                .ok_or(FuzzyError::NotFound {
+                    space: MissingSpace::Var,
+                    key: self.consequent[i].term.clone(),
+                })?
+                .domain();
+
+            let step = (dom_max - dom_min) / sampler.n as Float;
+
+            for k in 0..sampler.n {
+                let x = dom_min + (k as Float * step);
+                result_vec[k] = vers
+                    .get(&self.consequent[i].var.as_str())
+                    .ok_or(FuzzyError::NotFound {
+                        space: MissingSpace::Var,
+                        key: self.consequent[i].term.clone(),
+                    })?
+                    .eval(&self.consequent[i].term, x)?
+                    .min(alpha);
+            }
+
+            result_map.insert(self.consequent[i].var.to_string(), result_vec);
+        }
+        return Ok(result_map);
+        //TODO: Return type should be hashmap<string, Vec<Float>> where string signifies the variable(eg "fanspeed")
+    }
+}
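A hedged sketch of the per-rule flow this impl block exposes: activation collapses the antecedent to a firing strength alpha, and implicate samples each consequent term over its variable's domain and clips it at alpha. The fire_one_rule helper and the variable/term names are illustrative only, and the Result alias is assumed to come from the prelude as in the module above.

use std::collections::HashMap;

use crate::{
    Float,
    antecedent::Antecedent,
    mamdani::{Consequent, Rule},
    prelude::*,
    variable::Variable,
};

// Sketch only: assumes "temp"/"fan" variables (with "hot"/"high" terms) are already
// registered in `vars` and that `input` holds a crisp "temp" reading.
fn fire_one_rule(
    vars: &HashMap<String, Variable>,
    input: &HashMap<String, Float>,
) -> Result<HashMap<String, Vec<Float>>> {
    let rule = Rule {
        antecedent: Antecedent::Atom {
            var: "temp".into(),
            term: "hot".into(),
        },
        consequent: vec![Consequent {
            var: "fan".into(),
            term: "high".into(),
        }],
    };

    // Firing strength of the antecedent for the given crisp inputs.
    let alpha = rule.activation(input, vars)?;

    // Sampled, alpha-clipped output set per consequent variable, keyed by name.
    rule.implicate(alpha, vars, &UniformSampler::default())
}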

src/prelude.rs
Lines changed: 3 additions & 0 deletions

@@ -24,3 +24,6 @@ pub use crate::ops::FuzzyOps;
 
 // Term wrapper around a boxed membership function
 pub use crate::term::Term;
+
+//UniformSampling Functionality
+pub use crate::sampler::{Sampler, UniformSampler};

src/sampler.rs
Lines changed: 122 additions & 0 deletions

@@ -1 +1,123 @@
+use crate::{Float, error::FuzzyError, prelude::*};
 
+pub trait Sampler {
+    //Trait shape: Returning Result<Vec<Float>> is fine.
+    //If you later need performance, consider an iterator or a small wrapper type, but not necessary now.
+    fn sample(&self, min: Float, max: Float) -> Result<Vec<Float>>;
+}
+
+pub struct UniformSampler {
+    pub n: usize,
+}
+
+impl Default for UniformSampler {
+    fn default() -> Self {
+        Self { n: Self::DEFAULT_N }
+    }
+}
+
+impl UniformSampler {
+    pub const DEFAULT_N: usize = 101;
+
+    pub fn new(n: usize) -> Result<Self> {
+        if n < 2 {
+            return Err(FuzzyError::OutOfBounds);
+        }
+        Ok(Self { n: n })
+    }
+}
+
+impl Sampler for UniformSampler {
+    fn sample(&self, min: Float, max: Float) -> Result<Vec<Float>> {
+        if min >= max {
+            return Err(FuzzyError::BadArity);
+        }
+
+        if !(min.is_finite() && max.is_finite()) {
+            return Err(FuzzyError::BadArity);
+        }
+
+        let n = self.n;
+        let mut sample: Vec<Float> = Vec::with_capacity(n);
+        let step = (max - min) / (n as Float - 1.0);
+
+        for i in 0..n {
+            sample.push(min + i as Float * step)
+        }
+        sample[n - 1] = max;
+
+        Ok(sample)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::Float;
+    use crate::error::FuzzyError;
+    use crate::sampler::{Sampler, UniformSampler};
+
+    #[test]
+    fn uniform_sampler_two_points_inclusive_endpoints() {
+        let s = UniformSampler::new(2).unwrap();
+        let min: Float = -3.5;
+        let max: Float = 4.5;
+        let pts = s.sample(min, max).unwrap();
+        assert_eq!(pts.len(), 2);
+        assert_eq!(pts[0], min);
+        assert_eq!(pts[1], max, "Last point must equal max for n=2");
+    }
+
+    #[test]
+    fn uniform_sampler_inclusive_endpoints_default() {
+        let s = UniformSampler::default();
+        let n = UniformSampler::DEFAULT_N;
+        let min: Float = -5.0;
+        let max: Float = 5.0;
+        let pts = s.sample(min, max).unwrap();
+        assert_eq!(pts.len(), n);
+        assert_eq!(pts.first().copied().unwrap(), min);
+        assert_eq!(
+            pts.last().copied().unwrap(),
+            max,
+            "Sampler should include max exactly"
+        );
+    }
+
+    #[test]
+    fn uniform_sampler_spacing_monotonic() {
+        let s = UniformSampler::default();
+        let min: Float = 0.0;
+        let max: Float = 10.0;
+        let pts = s.sample(min, max).unwrap();
+        assert!(pts.windows(2).all(|w| w[1] >= w[0]));
+
+        // Check approximate uniform spacing consistency across interior points
+        let eps = Float::EPSILON * 10.0;
+        let base_step = pts[1] - pts[0];
+        for i in 2..pts.len() {
+            let step = pts[i] - pts[i - 1];
+            assert!((step - base_step).abs() <= eps, "Non-uniform step at i={i}");
+        }
+    }
+
+    #[test]
+    fn uniform_sampler_invalid_points_rejected() {
+        assert!(matches!(
+            UniformSampler::new(0),
+            Err(FuzzyError::OutOfBounds)
+        ));
+        assert!(matches!(
+            UniformSampler::new(1),
+            Err(FuzzyError::OutOfBounds)
+        ));
+    }
+
+    #[test]
+    fn uniform_sampler_invalid_range_rejected() {
+        let s = UniformSampler::default();
+        // min > max must error
+        assert!(matches!(s.sample(1.0, 0.0), Err(FuzzyError::BadArity)));
+        // Degenerate range should be rejected for a sampler that requires >=2 distinct points
+        assert!(matches!(s.sample(1.0, 1.0), Err(_)));
+    }
+}
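Because Sampler is a trait, other discretization strategies can sit behind the same sample(min, max) contract. A hypothetical sketch, not included in this commit, reusing the error variants the uniform sampler already uses:

use crate::{Float, error::FuzzyError, prelude::*};

// Hypothetical alternative sampler (illustrative only): returns the midpoints
// of `n` equal cells over [min, max], so the endpoints are never sampled.
pub struct MidpointSampler {
    pub n: usize,
}

impl Sampler for MidpointSampler {
    fn sample(&self, min: Float, max: Float) -> Result<Vec<Float>> {
        if self.n == 0 {
            return Err(FuzzyError::OutOfBounds);
        }
        if min >= max || !(min.is_finite() && max.is_finite()) {
            return Err(FuzzyError::BadArity);
        }
        let width = (max - min) / self.n as Float;
        Ok((0..self.n)
            .map(|i| min + (i as Float + 0.5) * width)
            .collect())
    }
}

Anything written against &impl Sampler (or a boxed dyn Sampler) then works with either strategy.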

src/system.rs

Lines changed: 0 additions & 1 deletion
This file was deleted.

src/variable.rs
Lines changed: 7 additions & 2 deletions

@@ -13,7 +13,7 @@ pub struct Variable {
     max: Float,
 
     /// Mapping from term name to its labeled membership function wrapper.
-    terms: HashMap<String, Term>,
+    pub terms: HashMap<String, Term>,
 }
 impl Variable {
     /// Constructs a new variable, validating that `min < max`.
@@ -72,8 +72,13 @@ impl Variable {
         }
     }
 
+    /// Returns the domain range over which the variable's terms (membership functions) are defined.
+    ///
+    /// - returns `(min, max)`
+    pub fn domain(&self) -> (Float, Float) {
+        (self.min, self.max)
+    }
     //Optional helpers:
-    //pub fn domain(&self) -> (Float, Float)
     //pub fn names(&self) -> impl Iterator<Item=&str>
     //pub fn fuzzify(&self, x: Float) -> crate::error::Result<Vec<(String, Float)>> to get all memberships at x.
 }
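A small sketch of what the new domain() helper enables outside Rule::implicate: sampling one term's membership over the variable's whole universe. The membership_grid helper is illustrative only, and it assumes Variable::eval accepts a &str term name and returns Result<Float>, matching how src/mamdani.rs calls it above.

use crate::{Float, prelude::*, variable::Variable};

// Sketch only: discretize one term's membership function over the variable's
// domain, using the same sampler the inference code uses.
fn membership_grid(var: &Variable, term: &str, sampler: &UniformSampler) -> Result<Vec<Float>> {
    let (min, max) = var.domain();
    sampler
        .sample(min, max)?
        .into_iter()
        .map(|x| var.eval(term, x)) // each eval yields Result<Float>; collect short-circuits on error
        .collect()
}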
