From 7759a6615d5bbf249dcead4411f19a1c6942b32a Mon Sep 17 00:00:00 2001
From: Adrien Burgun
Date: Tue, 11 Apr 2023 16:15:34 +0200
Subject: [PATCH] :tada: First commit

---
 .gitignore         |  2 ++
 Cargo.toml         | 10 ++++++
 src/activation.rs  | 45 ++++++++++++++++++++++++++
 src/layer.rs       |  9 ++++++
 src/layer/dense.rs | 79 ++++++++++++++++++++++++++++++++++++++++++++++
 src/lib.rs         |  3 ++
 src/utils.rs       | 16 ++++++++++
 7 files changed, 164 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 Cargo.toml
 create mode 100644 src/activation.rs
 create mode 100644 src/layer.rs
 create mode 100644 src/layer/dense.rs
 create mode 100644 src/lib.rs
 create mode 100644 src/utils.rs

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4fffb2f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+/target
+/Cargo.lock
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..ec6baa3
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "neuramethyst"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+ndarray = "^0.15"
+rand = "^0.8"
diff --git a/src/activation.rs b/src/activation.rs
new file mode 100644
index 0000000..9e5e4ed
--- /dev/null
+++ b/src/activation.rs
@@ -0,0 +1,45 @@
+pub trait Activation {
+    fn eval(&self, input: f64) -> f64;
+
+    fn eval_f32(&self, input: f32) -> f32 {
+        self.eval(input as f64) as f32
+    }
+
+    fn derivate(&self, at: f64) -> f64;
+
+    fn derivate_f32(&self, at: f32) -> f32 {
+        self.derivate(at as f64) as f32
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub struct Relu;
+impl Activation for Relu {
+    #[inline(always)]
+    fn eval(&self, input: f64) -> f64 {
+        input.max(0.0)
+    }
+
+    #[inline(always)]
+    fn eval_f32(&self, input: f32) -> f32 {
+        input.max(0.0)
+    }
+
+    #[inline(always)]
+    fn derivate(&self, input: f64) -> f64 {
+        if input > 0.0 {
+            1.0
+        } else {
+            0.0
+        }
+    }
+
+    #[inline(always)]
+    fn derivate_f32(&self, input: f32) -> f32 {
+        if input > 0.0 {
+            1.0
+        } else {
+            0.0
+        }
+    }
+}
diff --git a/src/layer.rs b/src/layer.rs
new file mode 100644
index 0000000..4db5de4
--- /dev/null
+++ b/src/layer.rs
@@ -0,0 +1,9 @@
+mod dense;
+pub use dense::NeuraDenseLayer;
+
+pub trait NeuraLayer {
+    type Input;
+    type Output;
+
+    fn eval(&self, input: &Self::Input) -> Self::Output;
+}
diff --git a/src/layer/dense.rs b/src/layer/dense.rs
new file mode 100644
index 0000000..a18ee9d
--- /dev/null
+++ b/src/layer/dense.rs
@@ -0,0 +1,79 @@
+use super::NeuraLayer;
+use crate::{activation::Activation, utils::multiply_matrix_vector};
+use rand::Rng;
+
+pub struct NeuraDenseLayer<Act: Activation, const INPUT_LEN: usize, const OUTPUT_LEN: usize> {
+    weights: [[f64; INPUT_LEN]; OUTPUT_LEN],
+    bias: [f64; OUTPUT_LEN],
+    activation: Act,
+}
+
+impl<Act: Activation, const INPUT_LEN: usize, const OUTPUT_LEN: usize>
+    NeuraDenseLayer<Act, INPUT_LEN, OUTPUT_LEN>
+{
+    pub fn new(
+        weights: [[f64; INPUT_LEN]; OUTPUT_LEN],
+        bias: [f64; OUTPUT_LEN],
+        activation: Act,
+    ) -> Self {
+        Self {
+            weights,
+            bias,
+            activation,
+        }
+    }
+
+    pub fn from_rng(rng: &mut impl Rng, activation: Act) -> Self {
+        let mut weights = [[0.0; INPUT_LEN]; OUTPUT_LEN];
+
+        let multiplier = std::f64::consts::SQRT_2 / (INPUT_LEN as f64).sqrt();
+
+        for i in 0..OUTPUT_LEN {
+            for j in 0..INPUT_LEN {
+                weights[i][j] = rng.gen::<f64>() * multiplier;
+            }
+        }
+
+        Self {
+            weights,
+            // Biases are zero-initialized, as this shouldn't cause any issues during training
+            bias: [0.0; OUTPUT_LEN],
+            activation,
+        }
+    }
+}
+
+impl<Act: Activation, const INPUT_LEN: usize, const OUTPUT_LEN: usize> NeuraLayer
+    for NeuraDenseLayer<Act, INPUT_LEN, OUTPUT_LEN>
+{
+    type Input = [f64; INPUT_LEN];
+
+    type Output = [f64; OUTPUT_LEN];
+
+    fn eval(&self, input: &Self::Input) -> Self::Output {
+        let mut result = multiply_matrix_vector(&self.weights, input);
+
+        for i in 0..OUTPUT_LEN {
+            result[i] = self.activation.eval(result[i] + self.bias[i]);
+        }
+
+        result
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use crate::activation::Relu;
+
+    #[test]
+    fn test_from_rng() {
+        let mut rng = rand::thread_rng();
+        let layer: NeuraDenseLayer<_, 64, 32> = NeuraDenseLayer::from_rng(&mut rng, Relu);
+        let mut input = [0.0; 64];
+        for x in 0..64 {
+            input[x] = rng.gen();
+        }
+        assert!(layer.eval(&input).len() == 32);
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..f2a5301
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,3 @@
+pub mod activation;
+pub mod layer;
+mod utils;
diff --git a/src/utils.rs b/src/utils.rs
new file mode 100644
index 0000000..39413aa
--- /dev/null
+++ b/src/utils.rs
@@ -0,0 +1,16 @@
+pub fn multiply_matrix_vector<const WIDTH: usize, const HEIGHT: usize>(
+    matrix: &[[f64; WIDTH]; HEIGHT],
+    vector: &[f64; WIDTH],
+) -> [f64; HEIGHT] {
+    let mut result = [0.0; HEIGHT];
+
+    for i in 0..HEIGHT {
+        let mut sum = 0.0;
+        for k in 0..WIDTH {
+            sum += matrix[i][k] * vector[k];
+        }
+        result[i] = sum;
+    }
+
+    result
+}
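
---
Usage sketch (not part of the committed files): a minimal example of how the
pieces introduced above fit together, assuming a downstream crate that depends
on `neuramethyst` and `rand` as declared in the Cargo.toml above. The layer
dimensions (4 inputs, 2 outputs) are arbitrary illustration values.

    use neuramethyst::activation::Relu;
    use neuramethyst::layer::{NeuraDenseLayer, NeuraLayer};

    fn main() {
        let mut rng = rand::thread_rng();

        // Dense layer with 4 inputs and 2 outputs; weights are drawn from
        // the RNG by `from_rng`, and biases start at zero.
        let layer: NeuraDenseLayer<Relu, 4, 2> =
            NeuraDenseLayer::from_rng(&mut rng, Relu);

        // `eval` computes relu(weights * input + bias).
        let output = layer.eval(&[1.0, -0.5, 0.25, 0.0]);
        println!("{output:?}");
    }

Because INPUT_LEN and OUTPUT_LEN are const generics, an input of the wrong
length (e.g. `&[1.0; 3]`) is rejected at compile time rather than at runtime.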