From 5a20acf5957e33d97f3499b0e9b1bf28c2c19527 Mon Sep 17 00:00:00 2001
From: Adrien Burgun
Date: Tue, 11 Apr 2023 22:17:49 +0200
Subject: [PATCH] :sparkles: Add NeuraNetwork

---
 src/algebra.rs                    |  32 ++++
 src/{ => derivable}/activation.rs |  31 ++++-----
 src/derivable/loss.rs             |  22 +++++++
 src/derivable/mod.rs              |  20 ++++++
 src/layer.rs                      |   9 ---
 src/layer/dense.rs                |  14 ++--
 src/layer/mod.rs                  |  22 +++++++
 src/lib.rs                        |   7 +-
 src/network.rs                    | 103 ++++++++++++++++++++++++++++++
 src/utils.rs                      |  42 +++++++++++-
 10 files changed, 266 insertions(+), 36 deletions(-)
 create mode 100644 src/algebra.rs
 rename src/{ => derivable}/activation.rs (54%)
 create mode 100644 src/derivable/loss.rs
 create mode 100644 src/derivable/mod.rs
 delete mode 100644 src/layer.rs
 create mode 100644 src/layer/mod.rs
 create mode 100644 src/network.rs

diff --git a/src/algebra.rs b/src/algebra.rs
new file mode 100644
index 0000000..dc0d71d
--- /dev/null
+++ b/src/algebra.rs
@@ -0,0 +1,32 @@
+/// An extension of `std::ops::AddAssign`
+pub trait NeuraAddAssign {
+    fn add_assign(&mut self, other: &Self);
+}
+
+impl<Left: NeuraAddAssign, Right: NeuraAddAssign> NeuraAddAssign for (Left, Right) {
+    fn add_assign(&mut self, other: &Self) {
+        NeuraAddAssign::add_assign(&mut self.0, &other.0);
+        NeuraAddAssign::add_assign(&mut self.1, &other.1);
+    }
+}
+
+impl<T: NeuraAddAssign, const N: usize> NeuraAddAssign for [T; N] {
+    fn add_assign(&mut self, other: &[T; N]) {
+        for i in 0..N {
+            NeuraAddAssign::add_assign(&mut self[i], &other[i]);
+        }
+    }
+}
+
+macro_rules! base {
+    ( $type:ty ) => {
+        impl NeuraAddAssign for $type {
+            fn add_assign(&mut self, other: &Self) {
+                std::ops::AddAssign::add_assign(self, other);
+            }
+        }
+    }
+}
+
+base!(f32);
+base!(f64);
diff --git a/src/activation.rs b/src/derivable/activation.rs
similarity index 54%
rename from src/activation.rs
rename to src/derivable/activation.rs
index 9e5e4ed..0a3cd1c 100644
--- a/src/activation.rs
+++ b/src/derivable/activation.rs
@@ -1,27 +1,11 @@
-pub trait Activation {
-    fn eval(&self, input: f64) -> f64;
-
-    fn eval_f32(&self, input: f32) -> f32 {
-        self.eval(input as f64) as f32
-    }
-
-    fn derivate(&self, at: f64) -> f64;
-
-    fn derivate_f32(&self, at: f32) -> f32 {
-        self.derivate(at as f64) as f32
-    }
-}
+use super::NeuraDerivable;
 
 #[derive(Clone, Copy, Debug, PartialEq)]
 pub struct Relu;
 
-impl Activation for Relu {
-    #[inline(always)]
-    fn eval(&self, input: f64) -> f64 {
-        input.max(0.0)
-    }
+impl NeuraDerivable<f64> for Relu {
     #[inline(always)]
-    fn eval_f32(&self, input: f32) -> f32 {
+    fn eval(&self, input: f64) -> f64 {
         input.max(0.0)
     }
 
@@ -33,9 +17,16 @@ impl Activation for Relu {
             0.0
         }
     }
+}
+
+impl NeuraDerivable<f32> for Relu {
+    #[inline(always)]
+    fn eval(&self, input: f32) -> f32 {
+        input.max(0.0)
+    }
 
     #[inline(always)]
-    fn derivate_f32(&self, input: f32) -> f32 {
+    fn derivate(&self, input: f32) -> f32 {
         if input > 0.0 {
             1.0
         } else {
diff --git a/src/derivable/loss.rs b/src/derivable/loss.rs
new file mode 100644
index 0000000..3e35dcd
--- /dev/null
+++ b/src/derivable/loss.rs
@@ -0,0 +1,22 @@
+use super::NeuraLoss;
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct Euclidean;
+impl<const N: usize> NeuraLoss<[f64; N]> for Euclidean {
+    type Out = f64;
+    type Target = [f64; N];
+
+    fn eval(&self, target: [f64; N], actual: [f64; N]) -> f64 {
+        let mut sum_squared = 0.0;
+
+        for i in 0..N {
+            sum_squared += (target[i] - actual[i]) * (target[i] - actual[i]);
+        }
+
+        sum_squared * 0.5
+    }
+
+    fn nabla(&self, target: [f64; N], actual: [f64; N]) -> [f64; N] {
+        todo!()
+    }
+}
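Note: `Euclidean::nabla` is left as a `todo!()` above. Since `eval` computes 0.5 * sum((target[i] - actual[i])^2), its gradient with respect to `actual` is actual[i] - target[i] in each component. A minimal sketch of the missing body (not part of this patch) could read:

    fn nabla(&self, target: [f64; N], actual: [f64; N]) -> [f64; N] {
        let mut result = [0.0; N];

        // d/d(actual[i]) of 0.5 * (target[i] - actual[i])^2 is actual[i] - target[i]
        for i in 0..N {
            result[i] = actual[i] - target[i];
        }

        result
    }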
diff --git a/src/derivable/mod.rs b/src/derivable/mod.rs
new file mode 100644
index 0000000..5c3db62
--- /dev/null
+++ b/src/derivable/mod.rs
@@ -0,0 +1,20 @@
+pub mod activation;
+pub mod loss;
+
+pub trait NeuraDerivable<F> {
+    fn eval(&self, input: F) -> F;
+
+    /// Should return the derivative of `self.eval(input)`
+    fn derivate(&self, at: F) -> F;
+}
+
+pub trait NeuraLoss<F> {
+    type Out;
+    type Target;
+
+    fn eval(&self, target: Self::Target, actual: F) -> Self::Out;
+
+    /// Should return the gradient of the loss function with respect to `actual`
+    /// ($\nabla_{\texttt{actual}} \texttt{self.eval}(\texttt{target}, \texttt{actual})$).
+    fn nabla(&self, target: Self::Target, actual: F) -> F;
+}
diff --git a/src/layer.rs b/src/layer.rs
deleted file mode 100644
index 4db5de4..0000000
--- a/src/layer.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-mod dense;
-pub use dense::NeuraDenseLayer;
-
-pub trait NeuraLayer {
-    type Input;
-    type Output;
-
-    fn eval(&self, input: &Self::Input) -> Self::Output;
-}
diff --git a/src/layer/dense.rs b/src/layer/dense.rs
index a18ee9d..c7762b2 100644
--- a/src/layer/dense.rs
+++ b/src/layer/dense.rs
@@ -1,14 +1,18 @@
 use super::NeuraLayer;
-use crate::{activation::Activation, utils::multiply_matrix_vector};
+use crate::{derivable::NeuraDerivable, utils::multiply_matrix_vector};
 use rand::Rng;
 
-pub struct NeuraDenseLayer<Act: Activation, const INPUT_LEN: usize, const OUTPUT_LEN: usize> {
+pub struct NeuraDenseLayer<
+    Act: NeuraDerivable<f64>,
+    const INPUT_LEN: usize,
+    const OUTPUT_LEN: usize,
+> {
     weights: [[f64; INPUT_LEN]; OUTPUT_LEN],
     bias: [f64; OUTPUT_LEN],
     activation: Act,
 }
 
-impl<Act: Activation, const INPUT_LEN: usize, const OUTPUT_LEN: usize>
+impl<Act: NeuraDerivable<f64>, const INPUT_LEN: usize, const OUTPUT_LEN: usize>
     NeuraDenseLayer<Act, INPUT_LEN, OUTPUT_LEN>
 {
     pub fn new(
@@ -43,7 +47,7 @@ impl<Act: Activation, const INPUT_LEN: usize, const OUTPUT_LEN: usize>
     }
 }
 
-impl<Act: Activation, const INPUT_LEN: usize, const OUTPUT_LEN: usize> NeuraLayer
+impl<Act: NeuraDerivable<f64>, const INPUT_LEN: usize, const OUTPUT_LEN: usize> NeuraLayer
     for NeuraDenseLayer<Act, INPUT_LEN, OUTPUT_LEN>
 {
     type Input = [f64; INPUT_LEN];
@@ -64,7 +68,7 @@ impl<Act: Activation, const INPUT_LEN: usize, const OUTPUT_LEN: usize> NeuraLayer
 #[cfg(test)]
 mod test {
     use super::*;
-    use crate::activation::Relu;
+    use crate::derivable::activation::Relu;
 
     #[test]
     fn test_from_rng() {
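Note: for context, a dense layer built with `from_rng` is evaluated through the `NeuraLayer` trait. A usage sketch, assuming code inside this crate (the input values are arbitrary):

    use crate::derivable::activation::Relu;
    use crate::layer::{NeuraDenseLayer, NeuraLayer};

    // A layer mapping 4 inputs to 2 outputs, with ReLU activation and random weights.
    let layer: NeuraDenseLayer<Relu, 4, 2> =
        NeuraDenseLayer::from_rng(&mut rand::thread_rng(), Relu);

    // `eval` computes activation(weights * input + bias).
    let output: [f64; 2] = layer.eval(&[1.0, -0.5, 0.25, 0.0]);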
diff --git a/src/layer/mod.rs b/src/layer/mod.rs
new file mode 100644
index 0000000..d10964b
--- /dev/null
+++ b/src/layer/mod.rs
@@ -0,0 +1,22 @@
+mod dense;
+pub use dense::NeuraDenseLayer;
+
+pub trait NeuraLayer {
+    type Input;
+    type Output;
+
+    fn eval(&self, input: &Self::Input) -> Self::Output;
+}
+
+#[macro_export]
+macro_rules! neura_layer {
+    ( "dense", $activation:expr, $output:expr ) => {
+        $crate::layer::NeuraDenseLayer::<_, _, { $output }>
+            ::from_rng(&mut rand::thread_rng(), $activation)
+    };
+
+    ( "dense", $activation:expr, $output:expr, $input:expr ) => {
+        $crate::layer::NeuraDenseLayer::<_, { $input }, { $output }>
+            ::from_rng(&mut rand::thread_rng(), $activation)
+    };
+}
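Note: `neura_layer!` only pins down the layer dimensions; the activation type and any remaining `_` placeholders are resolved by type inference (hence the `generic_arg_infer` feature enabled in src/lib.rs below). For example, `neura_layer!("dense", Relu, 16, 8)` expands to roughly:

    // 8 inputs, 16 outputs; the activation type parameter is inferred from `Relu`.
    crate::layer::NeuraDenseLayer::<_, 8, 16>::from_rng(&mut rand::thread_rng(), Relu)

while the two-argument form `neura_layer!("dense", Relu, 12)` leaves the input size as `_`, to be inferred from the surrounding network.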
diff --git a/src/lib.rs b/src/lib.rs
index f2a5301..dcdf856 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,8 @@
-pub mod activation;
+#![feature(generic_arg_infer)]
+
+pub mod derivable;
 pub mod layer;
+pub mod network;
+pub mod algebra;
+
 mod utils;
diff --git a/src/network.rs b/src/network.rs
new file mode 100644
index 0000000..6a1cbde
--- /dev/null
+++ b/src/network.rs
@@ -0,0 +1,103 @@
+use crate::layer::NeuraLayer;
+
+pub struct NeuraNetwork<Layer: NeuraLayer, ChildNetwork> {
+    layer: Layer,
+    child_network: ChildNetwork,
+}
+
+impl<Layer: NeuraLayer, ChildNetwork> NeuraNetwork<Layer, ChildNetwork> {
+    pub fn new(layer: Layer, child_network: ChildNetwork) -> Self {
+        Self {
+            layer,
+            child_network,
+        }
+    }
+
+    pub fn new_match_output(layer: Layer, child_network: ChildNetwork) -> Self
+    where
+        ChildNetwork: NeuraLayer<Input = Layer::Output>,
+    {
+        Self::new(layer, child_network)
+    }
+
+    pub fn child_network(&self) -> &ChildNetwork {
+        &self.child_network
+    }
+}
+
+impl<Layer: NeuraLayer> From<Layer> for NeuraNetwork<Layer, ()> {
+    fn from(layer: Layer) -> Self {
+        Self {
+            layer,
+            child_network: (),
+        }
+    }
+}
+
+impl<Layer: NeuraLayer> NeuraLayer for NeuraNetwork<Layer, ()> {
+    type Input = Layer::Input;
+    type Output = Layer::Output;
+
+    fn eval(&self, input: &Self::Input) -> Self::Output {
+        self.layer.eval(input)
+    }
+}
+
+impl<Layer: NeuraLayer, ChildNetwork: NeuraLayer<Input = Layer::Output>> NeuraLayer
+    for NeuraNetwork<Layer, ChildNetwork>
+{
+    type Input = Layer::Input;
+
+    type Output = ChildNetwork::Output;
+
+    fn eval(&self, input: &Self::Input) -> Self::Output {
+        self.child_network.eval(&self.layer.eval(input))
+    }
+}
+
+#[macro_export]
+macro_rules! neura_network {
+    [] => {
+        ()
+    };
+
+    [ $layer:expr $(,)? ] => {
+        $crate::network::NeuraNetwork::from($layer)
+    };
+
+    [ $first:expr, $($rest:expr),+ $(,)? ] => {
+        $crate::network::NeuraNetwork::new_match_output($first, $crate::neura_network![$($rest),+])
+    };
+}
+
+#[cfg(test)]
+mod test {
+    use crate::{derivable::activation::Relu, layer::NeuraDenseLayer, neura_layer};
+
+    use super::*;
+
+    #[test]
+    fn test_neura_network_macro() {
+        let mut rng = rand::thread_rng();
+
+        let _ = neura_network![
+            NeuraDenseLayer::<_, 8, 16>::from_rng(&mut rng, Relu),
+            NeuraDenseLayer::<_, _, 12>::from_rng(&mut rng, Relu),
+            NeuraDenseLayer::<_, _, 2>::from_rng(&mut rng, Relu)
+        ];
+
+        let _ =
+            neura_network![NeuraDenseLayer::<_, 8, 16>::from_rng(&mut rng, Relu),];
+
+        let _ = neura_network![
+            NeuraDenseLayer::<_, 8, 16>::from_rng(&mut rng, Relu),
+            NeuraDenseLayer::<_, _, 12>::from_rng(&mut rng, Relu),
+        ];
+
+        let _ = neura_network![
+            neura_layer!("dense", Relu, 16, 8),
+            neura_layer!("dense", Relu, 12),
+            neura_layer!("dense", Relu, 2)
+        ];
+    }
+}
diff --git a/src/utils.rs b/src/utils.rs
index 39413aa..aeffdc7 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,4 +1,4 @@
-pub fn multiply_matrix_vector<const WIDTH: usize, const HEIGHT: usize>(
+pub(crate) fn multiply_matrix_vector<const WIDTH: usize, const HEIGHT: usize>(
     matrix: &[[f64; WIDTH]; HEIGHT],
     vector: &[f64; WIDTH],
 ) -> [f64; HEIGHT] {
@@ -14,3 +14,43 @@
 
     result
 }
+
+pub(crate) fn assign_add_vector<const N: usize>(sum: &mut [f64; N], operand: &[f64; N]) {
+    for i in 0..N {
+        sum[i] += operand[i];
+    }
+}
+
+pub(crate) fn chunked<I: Iterator>(
+    iter: I,
+    chunk_size: usize,
+) -> impl Iterator<Item = Vec<I::Item>> {
+    struct Chunked<J: Iterator> {
+        iter: J,
+        chunk_size: usize,
+    }
+
+    impl<J: Iterator> Iterator for Chunked<J> {
+        type Item = Vec<J::Item>;
+
+        fn next(&mut self) -> Option<Self::Item> {
+            let mut result = Vec::with_capacity(self.chunk_size);
+
+            for _ in 0..self.chunk_size {
+                if let Some(item) = self.iter.next() {
+                    result.push(item);
+                } else {
+                    break;
+                }
+            }
+
+            if result.len() > 0 {
+                Some(result)
+            } else {
+                None
+            }
+        }
+    }
+
+    Chunked { iter, chunk_size }
+}
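Note: to close the loop on `neura_network!`, the three-layer call in the test above nests one `NeuraNetwork` per layer; it expands to roughly the following (a sketch, module paths abbreviated):

    NeuraNetwork::new_match_output(
        NeuraDenseLayer::<_, 8, 16>::from_rng(&mut rng, Relu),
        NeuraNetwork::new_match_output(
            NeuraDenseLayer::<_, _, 12>::from_rng(&mut rng, Relu),
            NeuraNetwork::from(NeuraDenseLayer::<_, _, 2>::from_rng(&mut rng, Relu)),
        ),
    );

Each `new_match_output` call constrains the child network's `Input` to the parent layer's `Output`, which is how the `_` input sizes are inferred (8 -> 16 -> 12 -> 2).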