parent 7759a6615d
commit 5a20acf595
@ -0,0 +1,32 @@
/// An extension of `std::ops::AddAssign`
pub trait NeuraAddAssign {
    fn add_assign(&mut self, other: &Self);
}

impl<Left: NeuraAddAssign, Right: NeuraAddAssign> NeuraAddAssign for (Left, Right) {
    fn add_assign(&mut self, other: &Self) {
        NeuraAddAssign::add_assign(&mut self.0, &other.0);
        NeuraAddAssign::add_assign(&mut self.1, &other.1);
    }
}

impl<const N: usize, T: NeuraAddAssign> NeuraAddAssign for [T; N] {
    fn add_assign(&mut self, other: &[T; N]) {
        for i in 0..N {
            NeuraAddAssign::add_assign(&mut self[i], &other[i]);
        }
    }
}

macro_rules! base {
    ( $type:ty ) => {
        impl NeuraAddAssign for $type {
            fn add_assign(&mut self, other: &Self) {
                std::ops::AddAssign::add_assign(self, other);
            }
        }
    }
}

base!(f32);
base!(f64);
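Editor's note: a minimal usage sketch of how these impls compose, assuming the trait above is in scope; the function and variable names are illustrative, not part of the commit.

fn add_assign_example() {
    // Gradients stored as a nested structure: a tuple of fixed-size arrays.
    let mut gradient: ([f64; 2], [f64; 3]) = ([1.0, 2.0], [3.0, 4.0, 5.0]);
    let delta = ([0.5, 0.5], [1.0, 1.0, 1.0]);

    // The tuple impl recurses into each field, the array impl loops over indices,
    // and both bottom out at the `base!`-generated impls for f32/f64.
    NeuraAddAssign::add_assign(&mut gradient, &delta);

    assert_eq!(gradient, ([1.5, 2.5], [4.0, 5.0, 6.0]));
}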
@ -0,0 +1,22 @@
use super::NeuraLoss;

#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Euclidean;
impl<const N: usize> NeuraLoss<[f64; N]> for Euclidean {
    type Out = f64;
    type Target = [f64; N];

    fn eval(&self, target: [f64; N], actual: [f64; N]) -> f64 {
        let mut sum_squared = 0.0;

        for i in 0..N {
            sum_squared += (target[i] - actual[i]) * (target[i] - actual[i]);
        }

        sum_squared * 0.5
    }

    fn nabla(&self, target: [f64; N], actual: [f64; N]) -> [f64; N] {
        todo!()
    }
}
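Editor's note: `nabla` is left as `todo!()` in this commit. For the loss above, 0.5 * Σ (target_i - actual_i)^2, the gradient with respect to `actual` is actual_i - target_i; a sketch of what the body could look like (not the author's implementation):

    fn nabla(&self, target: [f64; N], actual: [f64; N]) -> [f64; N] {
        let mut gradient = [0.0; N];
        for i in 0..N {
            // d/d(actual_i) of 0.5 * (target_i - actual_i)^2 is actual_i - target_i
            gradient[i] = actual[i] - target[i];
        }
        gradient
    }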
@ -0,0 +1,20 @@
pub mod activation;
pub mod loss;

pub trait NeuraDerivable<F> {
    fn eval(&self, input: F) -> F;

    /// Should return the derivative of `self.eval(input)`
    fn derivate(&self, at: F) -> F;
}

pub trait NeuraLoss<F> {
    type Out;
    type Target;

    fn eval(&self, target: Self::Target, actual: F) -> Self::Out;

    /// Should return the gradient of the loss function with respect to `actual`
    /// ($\nabla_{\texttt{actual}} \texttt{self.eval}(\texttt{target}, \texttt{actual})$).
    fn nabla(&self, target: Self::Target, actual: F) -> F;
}
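Editor's note: a sketch of how an activation function satisfies `NeuraDerivable`; the name `ReluSketch` is illustrative, the crate's own `Relu` lives in `derivable::activation` and is not shown in this commit.

struct ReluSketch;

impl NeuraDerivable<f64> for ReluSketch {
    fn eval(&self, input: f64) -> f64 {
        input.max(0.0)
    }

    // Derivative of max(0, x): 1 for positive inputs, 0 otherwise.
    fn derivate(&self, at: f64) -> f64 {
        if at > 0.0 { 1.0 } else { 0.0 }
    }
}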
@ -1,9 +0,0 @@
mod dense;
pub use dense::NeuraDenseLayer;

pub trait NeuraLayer {
    type Input;
    type Output;

    fn eval(&self, input: &Self::Input) -> Self::Output;
}
@ -0,0 +1,22 @@
mod dense;
pub use dense::NeuraDenseLayer;

pub trait NeuraLayer {
    type Input;
    type Output;

    fn eval(&self, input: &Self::Input) -> Self::Output;
}

#[macro_export]
macro_rules! neura_layer {
    ( "dense", $activation:expr, $output:expr ) => {
        NeuraDenseLayer::from_rng(&mut rand::thread_rng(), $activation)
            as NeuraDenseLayer<_, _, $output>
    };

    ( "dense", $activation:expr, $output:expr, $input:expr ) => {
        NeuraDenseLayer::from_rng(&mut rand::thread_rng(), $activation)
            as NeuraDenseLayer<_, $input, $output>
    };
}
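Editor's note: a minimal layer satisfying the `NeuraLayer` contract, to make the associated types concrete; `IdentityLayer` is hypothetical and not part of the commit.

struct IdentityLayer<const N: usize>;

impl<const N: usize> NeuraLayer for IdentityLayer<N> {
    type Input = [f64; N];
    type Output = [f64; N];

    // Passes the input through unchanged.
    fn eval(&self, input: &Self::Input) -> Self::Output {
        *input
    }
}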
@ -1,3 +1,8 @@
pub mod activation;
#![feature(generic_arg_infer)]

pub mod derivable;
pub mod layer;
pub mod network;
pub mod algebra;

mod utils;
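Editor's note: `generic_arg_infer` is the feature gate that allows `_` to stand in for const generic arguments, which the `NeuraDenseLayer<_, _, 12>`-style annotations in the tests below rely on.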
@ -0,0 +1,103 @@
use crate::{layer::NeuraLayer, train::NeuraTrainable};

pub struct NeuraNetwork<Layer: NeuraLayer, ChildNetwork> {
    layer: Layer,
    child_network: ChildNetwork,
}

impl<Layer: NeuraLayer, ChildNetwork> NeuraNetwork<Layer, ChildNetwork> {
    pub fn new(layer: Layer, child_network: ChildNetwork) -> Self {
        Self {
            layer,
            child_network,
        }
    }

    pub fn new_match_output(layer: Layer, child_network: ChildNetwork) -> Self
    where
        ChildNetwork: NeuraLayer<Input = Layer::Output>,
    {
        Self::new(layer, child_network)
    }

    pub fn child_network(&self) -> &ChildNetwork {
        &self.child_network
    }
}

impl<Layer: NeuraLayer> From<Layer> for NeuraNetwork<Layer, ()> {
    fn from(layer: Layer) -> Self {
        Self {
            layer,
            child_network: (),
        }
    }
}

impl<Layer: NeuraLayer> NeuraLayer for NeuraNetwork<Layer, ()> {
    type Input = Layer::Input;
    type Output = Layer::Output;

    fn eval(&self, input: &Self::Input) -> Self::Output {
        self.layer.eval(input)
    }
}

impl<Layer: NeuraLayer, ChildNetwork: NeuraLayer<Input = Layer::Output>> NeuraLayer
    for NeuraNetwork<Layer, ChildNetwork>
{
    type Input = Layer::Input;

    type Output = ChildNetwork::Output;

    fn eval(&self, input: &Self::Input) -> Self::Output {
        self.child_network.eval(&self.layer.eval(input))
    }
}

#[macro_export]
macro_rules! neura_network {
    [] => {
        ()
    };

    [ $layer:expr $(,)? ] => {
        NeuraNetwork::from($layer)
    };

    [ $first:expr, $($rest:expr),+ $(,)? ] => {
        NeuraNetwork::new_match_output($first, neura_network![$($rest),+])
    };
}

#[cfg(test)]
mod test {
    use crate::{derivable::activation::Relu, layer::NeuraDenseLayer, neura_layer};

    use super::*;

    #[test]
    fn test_neura_network_macro() {
        let mut rng = rand::thread_rng();

        let _ = neura_network![
            NeuraDenseLayer::from_rng(&mut rng, Relu) as NeuraDenseLayer<_, 8, 16>,
            NeuraDenseLayer::from_rng(&mut rng, Relu) as NeuraDenseLayer<_, _, 12>,
            NeuraDenseLayer::from_rng(&mut rng, Relu) as NeuraDenseLayer<_, _, 2>
        ];

        let _ =
            neura_network![NeuraDenseLayer::from_rng(&mut rng, Relu) as NeuraDenseLayer<_, 8, 16>,];

        let _ = neura_network![
            NeuraDenseLayer::from_rng(&mut rng, Relu) as NeuraDenseLayer<_, 8, 16>,
            NeuraDenseLayer::from_rng(&mut rng, Relu) as NeuraDenseLayer<_, _, 12>,
        ];

        let _ = neura_network![
            neura_layer!("dense", Relu, 16, 8),
            neura_layer!("dense", Relu, 12),
            neura_layer!("dense", Relu, 2)
        ];
    }
}
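Editor's note: a sketch of what the recursive macro produces. With three layers `a`, `b`, `c` (names hypothetical) whose output and input dimensions line up, `neura_network![a, b, c]` expands to:

    // Each layer is wrapped right to left, ending in a NeuraNetwork<_, ()> leaf:
    NeuraNetwork::new_match_output(
        a,
        NeuraNetwork::new_match_output(
            b,
            NeuraNetwork::from(c),
        ),
    )

so the result has type `NeuraNetwork<A, NeuraNetwork<B, NeuraNetwork<C, ()>>>`, and the `ChildNetwork: NeuraLayer<Input = Layer::Output>` bound on `new_match_output` turns a dimension mismatch between consecutive layers into a compile-time error.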