🎨 Clean up, move error types to src/err.rs

main
Shad Amethyst 2 years ago
parent 972b177767
commit 38bd61fed5

@@ -1,11 +1,12 @@
 use std::io::Write;
 
+#[allow(unused_imports)]
 use nalgebra::{dvector, DVector};
 #[allow(unused_imports)]
 use neuramethyst::derivable::activation::{LeakyRelu, Linear, Relu, Tanh};
 use neuramethyst::derivable::loss::CrossEntropy;
 use neuramethyst::derivable::regularize::NeuraL1;
-use neuramethyst::{plot_losses, prelude::*};
+use neuramethyst::{one_hot, plot_losses, prelude::*};
 use rand::Rng;
@@ -132,11 +133,3 @@ fn draw_neuron_activation<F: Fn([f64; 2]) -> Vec<f64>>(callback: F, scale: f64)
     viuer::print(&image::DynamicImage::ImageRgb8(image), &config).unwrap();
 }
-
-fn one_hot(value: usize, categories: usize) -> DVector<f32> {
-    let mut res = DVector::from_element(categories, 0.0);
-    if value < categories {
-        res[value] = 1.0;
-    }
-    res
-}

@@ -4,7 +4,7 @@ use nalgebra::{dvector, DVector};
 use neuramethyst::derivable::activation::Linear;
 use neuramethyst::derivable::loss::CrossEntropy;
 use neuramethyst::derivable::regularize::NeuraL1;
-use neuramethyst::{plot_losses, prelude::*};
+use neuramethyst::{one_hot, plot_losses, prelude::*};
 use rand::Rng;
@@ -95,11 +95,3 @@ fn main() {
         writeln!(&mut file, "{},{},{}", input[0], input[1], guess).unwrap();
     }
 }
-
-fn one_hot(value: usize, categories: usize) -> DVector<f32> {
-    let mut res = DVector::from_element(categories, 0.0);
-    if value < categories {
-        res[value] = 1.0;
-    }
-    res
-}

@@ -1,4 +1,5 @@
 use nalgebra::DVector;
+use rand::Rng;
 use rust_mnist::Mnist;
 
 use neuramethyst::{
@@ -7,7 +8,7 @@ use neuramethyst::{
         activation::{Linear, Logistic, Relu, Swish, Tanh},
         loss::{CrossEntropy, Euclidean},
     },
-    plot_losses,
+    one_hot, plot_losses,
     prelude::*,
 };
@@ -59,23 +60,27 @@ pub fn main() {
         neura_layer!("dense", 100).activation(Swish(Logistic)),
         neura_layer!("dense", 50).activation(Swish(Logistic)),
         neura_layer!("dense", LATENT_SIZE).activation(Tanh),
-        neura_layer!("dense", 50),
-        neura_layer!("dense", 100),
+        neura_layer!("dense", 100).activation(Swish(Logistic)),
         neura_layer!("dense", WIDTH * HEIGHT).activation(Relu),
     ]
     .construct(NeuraShape::Vector(WIDTH * HEIGHT))
     .unwrap();
 
-    let trainer = NeuraBatchedTrainer::with_epochs(0.03, 75, 512, TRAIN_SIZE);
+    let mut trainer = NeuraBatchedTrainer::with_epochs(0.03, 200, 512, TRAIN_SIZE);
+    trainer.learning_momentum = 0.002;
     // trainer.log_iterations = 1;
 
+    let mut rng = rand::thread_rng();
     let losses = trainer.train(
         &NeuraBackprop::new(Euclidean),
         &mut network,
-        cycle_shuffling(
-            train_images.clone().zip(train_images.clone()),
-            rand::thread_rng(),
-        ),
+        cycle_shuffling(train_images.clone(), rand::thread_rng()).map(move |input| {
+            let dx = rng.gen_range(-4..4);
+            let dy = rng.gen_range(-4..4);
+
+            let shifted = shift(&input, dx, dy);
+            (shifted.clone(), shifted)
+        }),
         &test_data,
     );
@@ -83,7 +88,7 @@ pub fn main() {
     // Then, train a small network to decode the encoded data into the categories
 
-    let trimmed_network = network.clone().trim_tail().trim_tail().trim_tail();
+    let trimmed_network = network.clone().trim_tail().trim_tail();
 
     let mut network = neura_sequential![
         ..trimmed_network.lock(),
@@ -102,11 +107,11 @@ pub fn main() {
         .zip(test_labels.clone())
         .collect::<Vec<_>>();
 
-    let trainer = NeuraBatchedTrainer::with_epochs(0.03, 20, 128, TRAIN_SIZE);
+    let trainer = NeuraBatchedTrainer::with_epochs(0.03, 10, 128, TRAIN_SIZE);
 
     plot_losses(
         trainer.train(
-            &NeuraBackprop::new(Euclidean),
+            &NeuraBackprop::new(CrossEntropy),
             &mut network,
             cycle_shuffling(train_images.clone().zip(train_labels), rand::thread_rng()),
             &test_data,
@@ -135,10 +140,21 @@ pub fn main() {
     );
 }
 
-fn one_hot(value: usize, categories: usize) -> DVector<f32> {
-    let mut res = DVector::from_element(categories, 0.0);
-    if value < categories {
-        res[value] = 1.0;
-    }
+fn shift(image: &DVector<f32>, dx: i32, dy: i32) -> DVector<f32> {
+    let mut res = DVector::from_element(image.len(), 0.0);
+    let width = WIDTH as i32;
+    let height = HEIGHT as i32;
+
+    for y in 0..height {
+        for x in 0..width {
+            let x2 = x + dx;
+            let y2 = y + dy;
+            if y2 < 0 || y2 >= height || x2 < 0 || x2 >= width {
+                continue;
+            }
+            res[(y2 * width + x2) as usize] = image[(y * width + x) as usize];
+        }
+    }
+
     res
 }
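
Note on the training change above: instead of pairing each MNIST image with itself, the autoencoder is now trained on randomly shifted copies. A minimal sketch of one augmented sample, mirroring the closure passed to .map() above (`img` is assumed to be a WIDTH * HEIGHT image flattened into a DVector<f32>):

    // Sketch only, not part of the commit.
    let mut rng = rand::thread_rng();
    let dx = rng.gen_range(-4..4); // half-open range, so -4..=3 pixels horizontally
    let dy = rng.gen_range(-4..4); // -4..=3 pixels vertically
    let shifted = shift(&img, dx, dy); // pixels shifted out of frame are dropped; vacated cells stay 0.0
    let sample = (shifted.clone(), shifted); // input and target are the same shifted image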

@@ -10,7 +10,7 @@ use neuramethyst::{
         loss::Euclidean,
         regularize::NeuraL2,
     },
-    plot_losses,
+    one_hot, plot_losses,
     prelude::*,
 };
@@ -145,14 +145,6 @@ fn uniform_vector(length: usize) -> DVector<f32> {
     res
 }
 
-fn one_hot(value: usize, categories: usize) -> DVector<f32> {
-    let mut res = DVector::from_element(categories, 0.0);
-    if value < categories {
-        res[value] = 1.0;
-    }
-    res
-}
-
 fn add_noise(mut image: DVector<f32>, rng: &mut impl Rng, amount: f32) -> DVector<f32> {
     if amount <= 0.0 {
         return image;

@@ -2,7 +2,7 @@ use crate::algebra::NeuraVector;
 
 pub mod activation;
 pub mod loss;
-pub mod reduce;
+// pub mod reduce;
 pub mod regularize;
 
 pub trait NeuraDerivable<F> {

@@ -0,0 +1,93 @@
+//! Various error types
+//!
+
+use std::fmt::{Debug, Formatter};
+
+use crate::prelude::*;
+
+pub trait NeuraRecursiveErrDebug {
+    fn fmt_rec(&self, f: &mut Formatter<'_>, depth: usize) -> std::fmt::Result;
+}
+
+impl<Err: Debug> NeuraRecursiveErrDebug for NeuraRecursiveErr<Err, ()> {
+    fn fmt_rec(&self, f: &mut Formatter<'_>, depth: usize) -> std::fmt::Result {
+        match self {
+            Self::Current(err) => {
+                write!(f, "NeuraRecursiveErr(depth={}, ", depth)?;
+                err.fmt(f)?;
+                write!(f, ")")
+            }
+            Self::Child(_) => write!(f, "NeuraRecursiveErr(depth={}, ())", depth),
+        }
+    }
+}
+
+impl<Err: Debug, ChildErr: NeuraRecursiveErrDebug> NeuraRecursiveErrDebug
+    for NeuraRecursiveErr<Err, ChildErr>
+{
+    #[inline]
+    fn fmt_rec(&self, f: &mut Formatter<'_>, depth: usize) -> std::fmt::Result {
+        match self {
+            Self::Current(err) => {
+                write!(f, "NeuraRecursiveErr(depth={}, ", depth)?;
+                err.fmt(f)?;
+                write!(f, ")")
+            }
+            Self::Child(err) => err.fmt_rec(f, depth + 1),
+        }
+    }
+}
+
+impl<Err: Debug, ChildErr> Debug for NeuraRecursiveErr<Err, ChildErr>
+where
+    Self: NeuraRecursiveErrDebug,
+{
+    #[inline]
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        self.fmt_rec(f, 0)
+    }
+}
+
+/// Error type returned by `NeuraIsolateLayer::construct`
+#[derive(Clone, Debug)]
+pub enum NeuraIsolateLayerErr {
+    Incompatible {
+        start: NeuraShape,
+        end: NeuraShape,
+        input_shape: NeuraShape,
+    },
+    OutOfBound {
+        start: NeuraShape,
+        end: NeuraShape,
+        input_shape: NeuraShape,
+    },
+    OutOfOrder {
+        start: NeuraShape,
+        end: NeuraShape,
+    },
+}
+
+#[derive(Clone, Copy, Debug)]
+pub enum NeuraAxisErr {
+    NoInput,
+    ConflictingShape(NeuraShape, NeuraShape),
+}
+
+#[derive(Clone, Debug)]
+pub enum NeuraResidualConstructErr<LayerErr> {
+    Layer(LayerErr),
+    WrongConnection(isize),
+    AxisErr(NeuraAxisErr),
+    NoOutput,
+}
+
+#[derive(Clone)]
+pub enum NeuraRecursiveErr<Err, ChildErr> {
+    Current(Err),
+    Child(ChildErr),
+}
+
+pub struct NeuraDimensionsMismatch {
+    pub existing: usize,
+    pub new: NeuraShape,
+}
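
The NeuraRecursiveErrDebug plumbing above is there so that a nested Child(Child(Current(err))) chain prints as a single line with a depth counter rather than a tower of wrapper variants. A minimal sketch of the intended output, using a &str as a stand-in for a real layer error:

    use neuramethyst::err::NeuraRecursiveErr;

    // An error raised by the second element of a two-layer construction:
    let err: NeuraRecursiveErr<(), NeuraRecursiveErr<&str, ()>> =
        NeuraRecursiveErr::Child(NeuraRecursiveErr::Current("dimension mismatch"));

    // The Debug impl recurses through Child and reports the depth at which it found the error:
    println!("{:?}", err); // NeuraRecursiveErr(depth=1, "dimension mismatch")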

@@ -4,7 +4,7 @@ use nalgebra::{DMatrix, DVector};
 use num::Float;
 use rand::Rng;
 
-use crate::derivable::NeuraDerivable;
+use crate::{derivable::NeuraDerivable, err::NeuraDimensionsMismatch};
 
 use super::*;
@@ -158,6 +158,24 @@ where
     }
 }
 
+impl<F: Float, Act: NeuraDerivable<F>, Reg: NeuraDerivable<F>> NeuraPartialLayer
+    for NeuraDenseLayer<F, Act, Reg>
+{
+    type Constructed = Self;
+    type Err = NeuraDimensionsMismatch;
+
+    fn construct(self, input_shape: NeuraShape) -> Result<Self::Constructed, Self::Err> {
+        if input_shape.size() != self.weights.shape().1 {
+            return Err(NeuraDimensionsMismatch {
+                existing: self.weights.shape().1,
+                new: input_shape,
+            });
+        }
+
+        Ok(self)
+    }
+}
+
 impl<
     F: Float + std::fmt::Debug + 'static + std::ops::AddAssign + std::ops::MulAssign,
     Act: NeuraDerivable<F>,
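
A hedged sketch of what the new impl buys (the layer and shapes here are illustrative, not from the commit): an already-initialized dense layer can be passed through construct() again, and a mismatched input shape is reported as NeuraDimensionsMismatch instead of failing later during the forward pass.

    use neuramethyst::err::NeuraDimensionsMismatch;
    use neuramethyst::prelude::*;

    // Illustrative: `layer` is assumed to be a NeuraDenseLayer whose weight matrix
    // is 100 x 784, i.e. it expects a 784-dimensional input.
    match layer.construct(NeuraShape::Vector(600)) {
        Ok(_) => unreachable!("600 does not match the 784 columns of the weight matrix"),
        Err(NeuraDimensionsMismatch { existing, new }) => {
            eprintln!("layer expects {} inputs, was given {:?}", existing, new);
        }
    }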

@@ -1,5 +1,7 @@
 use nalgebra::{DVector, Scalar};
 
+use crate::err::NeuraIsolateLayerErr;
+
 use super::*;
 
 /// **Class invariant:** start and end are
@@ -9,24 +11,6 @@ pub struct NeuraIsolateLayer {
     end: NeuraShape,
 }
 
-#[derive(Clone, Debug)]
-pub enum NeuraIsolateLayerErr {
-    Incompatible {
-        start: NeuraShape,
-        end: NeuraShape,
-        input_shape: NeuraShape,
-    },
-    OutOfBound {
-        start: NeuraShape,
-        end: NeuraShape,
-        input_shape: NeuraShape,
-    },
-    OutOfOrder {
-        start: NeuraShape,
-        end: NeuraShape,
-    },
-}
-
 impl NeuraIsolateLayer {
     pub fn new<T: Into<NeuraShape>>(start: T, end: T) -> Option<Self> {
         let start = start.into();

@@ -5,6 +5,7 @@ pub mod layer;
 pub mod network;
 pub mod train;
 
+pub mod err;
 mod utils;
 
 // TODO: move to a different file

@@ -2,17 +2,11 @@ use std::borrow::Borrow;
 
 use nalgebra::{Const, DVector, Dyn, Scalar, VecStorage};
 
-use crate::prelude::NeuraShape;
+use crate::{err::NeuraAxisErr, prelude::NeuraShape};
 
 #[derive(Clone, Copy, Debug)]
 pub struct NeuraAxisAppend;
 
-#[derive(Clone, Copy, Debug)]
-pub enum NeuraAxisErr {
-    NoInput,
-    ConflictingShape(NeuraShape, NeuraShape),
-}
-
 pub trait NeuraCombineInputs<T> {
     type Combined;

@@ -1,4 +1,7 @@
+use crate::err::*;
+
 use super::*;
+use NeuraResidualConstructErr::*;
 
 pub trait NeuraResidualConstruct {
     type Constructed;
@@ -12,64 +15,13 @@ pub trait NeuraResidualConstruct {
     ) -> Result<Self::Constructed, Self::Err>;
 }
 
-#[derive(Clone, Debug)]
-pub enum NeuraResidualConstructErr<LayerErr, ChildErr> {
-    LayerErr(LayerErr),
-    ChildErr(ChildErr),
-    WrongConnection(isize),
-    AxisErr(NeuraAxisErr),
-    NoOutput,
-}
-
-use NeuraResidualConstructErr::*;
-
-// impl<Layer: NeuraPartialLayer, Axis> NeuraResidualConstruct for NeuraResidualNode<Layer, NeuraResidualLast, Axis>
-// where
-//     Axis: NeuraCombineInputs<NeuraShape, Combined = Result<NeuraShape, NeuraAxisErr>>,
-// {
-//     type Constructed = NeuraResidualNode<Layer::Constructed, NeuraResidualLast, Axis>;
-//     type Err = NeuraResidualConstructErr<Layer::Err, <NeuraResidualLast as NeuraPartialLayer>::Err>;
-//
-//     fn construct_residual(
-//         self,
-//         inputs: NeuraResidualInput<NeuraShape>,
-//         indices: NeuraResidualInput<usize>,
-//         current_index: usize,
-//     ) -> Result<Self::Constructed, Self::Err> {
-//         let (layer_input_shape, _rest) = inputs.shift();
-//         let layer_input_shape = self
-//             .axis
-//             .combine(layer_input_shape)
-//             .map_err(|e| AxisErr(e))?;
-//
-//         let layer = self
-//             .layer
-//             .construct(layer_input_shape)
-//             .map_err(|e| LayerErr(e))?;
-//         let layer_shape = layer.output_shape();
-//
-//         if let Some(oob_offset) = self.offsets.iter().copied().find(|o| *o > 0) {
-//             return Err(WrongConnection(oob_offset));
-//         }
-//         // TODO: check rest for non-zero columns
-//
-//         Ok(NeuraResidualNode {
-//             layer,
-//             child_network: (),
-//             offsets: self.offsets,
-//             axis: self.axis,
-//             output_shape: Some(layer_shape),
-//         })
-//     }
-// }
-
 impl<Layer: NeuraPartialLayer, ChildNetwork: NeuraResidualConstruct, Axis> NeuraResidualConstruct
     for NeuraResidualNode<Layer, ChildNetwork, Axis>
 where
     Axis: NeuraCombineInputs<NeuraShape, Combined = Result<NeuraShape, NeuraAxisErr>>,
 {
     type Constructed = NeuraResidualNode<Layer::Constructed, ChildNetwork::Constructed, Axis>;
-    type Err = NeuraResidualConstructErr<Layer::Err, ChildNetwork::Err>;
+    type Err = NeuraRecursiveErr<NeuraResidualConstructErr<Layer::Err>, ChildNetwork::Err>;
 
     fn construct_residual(
         self,
@@ -82,16 +34,19 @@ where
         let self_input_shapes = input_shapes.iter().map(|x| **x).collect::<Vec<_>>();
 
-        let layer_input_shape = self.axis.combine(input_shapes).map_err(|e| AxisErr(e))?;
+        let layer_input_shape = self
+            .axis
+            .combine(input_shapes)
+            .map_err(|e| NeuraRecursiveErr::Current(AxisErr(e)))?;
 
         let layer = self
             .layer
             .construct(layer_input_shape)
-            .map_err(|e| LayerErr(e))?;
+            .map_err(|e| NeuraRecursiveErr::Current(Layer(e)))?;
         let layer_shape = Rc::new(layer.output_shape());
 
         if self.offsets.len() == 0 {
-            return Err(NoOutput);
+            return Err(NeuraRecursiveErr::Current(NoOutput));
        }
 
         for &offset in &self.offsets {
@@ -109,7 +64,7 @@ where
         let child_network = self
             .child_network
             .construct_residual(rest_inputs, rest_indices, current_index + 1)
-            .map_err(|e| ChildErr(e))?;
+            .map_err(|e| NeuraRecursiveErr::Child(e))?;
 
         Ok(NeuraResidualNode {
             layer,

@@ -1,7 +1,10 @@
+use crate::err::*;
 use crate::layer::*;
 use crate::network::*;
 use crate::utils::unwrap_or_clone;
 
+use NeuraResidualConstructErr::*;
+
 use std::borrow::Cow;
 
 use super::construct::*;
@@ -28,7 +31,7 @@ impl Default for NeuraResidualLast {
 impl NeuraResidualConstruct for NeuraResidualLast {
     type Constructed = NeuraResidualLast;
-    type Err = NeuraResidualConstructErr<(), ()>;
+    type Err = NeuraRecursiveErr<NeuraResidualConstructErr<()>, ()>;
 
     fn construct_residual(
         self,
@@ -39,15 +42,15 @@ impl NeuraResidualConstruct for NeuraResidualLast {
         let (this_input, _rest) = input.shift();
         let index = indices
             .get_first()
-            .ok_or(Self::Err::AxisErr(NeuraAxisErr::NoInput))?;
+            .ok_or(Self::Err::Current(AxisErr(NeuraAxisErr::NoInput)))?;
 
         if *index != current_index - 1 {
-            return Err(Self::Err::WrongConnection(
+            return Err(Self::Err::Current(WrongConnection(
                 current_index as isize - *index as isize - 1,
-            ));
+            )));
         }
         if this_input.len() != 1 {
-            return Err(Self::Err::AxisErr(NeuraAxisErr::NoInput));
+            return Err(Self::Err::Current(AxisErr(NeuraAxisErr::NoInput)));
         }
 
         // TODO: check that rest contains nothing else

@@ -12,7 +12,7 @@ mod axis;
 pub use axis::*;
 
 mod construct;
-pub use construct::NeuraResidualConstructErr;
+pub use construct::NeuraResidualConstruct;
 
 mod node;
 pub use node::*;

@@ -1,13 +1,7 @@
-use crate::layer::NeuraShapedLayer;
+use crate::{err::NeuraRecursiveErr, layer::NeuraShapedLayer};
 
 use super::*;
 
-#[derive(Debug, Clone)]
-pub enum NeuraSequentialConstructErr<Err, ChildErr> {
-    Current(Err),
-    Child(ChildErr),
-}
-
 impl<Layer: NeuraPartialLayer> NeuraPartialLayer for NeuraSequential<Layer, ()> {
     type Constructed = NeuraSequential<Layer::Constructed, ()>;
     type Err = Layer::Err;
@@ -24,19 +18,19 @@ impl<Layer: NeuraPartialLayer, ChildNetwork: NeuraPartialLayer> NeuraPartialLayer
     for NeuraSequential<Layer, ChildNetwork>
 {
     type Constructed = NeuraSequential<Layer::Constructed, ChildNetwork::Constructed>;
-    type Err = NeuraSequentialConstructErr<Layer::Err, ChildNetwork::Err>;
+    type Err = NeuraRecursiveErr<Layer::Err, ChildNetwork::Err>;
 
     fn construct(self, input_shape: NeuraShape) -> Result<Self::Constructed, Self::Err> {
         let layer = self
             .layer
             .construct(input_shape)
-            .map_err(|e| NeuraSequentialConstructErr::Current(e))?;
+            .map_err(|e| NeuraRecursiveErr::Current(e))?;
 
         // TODO: ensure that this operation (and all recursive operations) are directly allocated on the heap
         let child_network = self
             .child_network
             .construct(layer.output_shape())
-            .map_err(|e| NeuraSequentialConstructErr::Child(e))?;
+            .map_err(|e| NeuraRecursiveErr::Child(e))?;
         let child_network = Box::new(child_network);
 
         Ok(NeuraSequential {

@@ -1,4 +1,4 @@
-use crate::algebra::NeuraVector;
+use nalgebra::DVector;
 
 #[allow(dead_code)]
 pub(crate) fn assign_add_vector<const N: usize>(sum: &mut [f64; N], operand: &[f64; N]) {
@@ -90,17 +90,15 @@ where
 #[cfg(test)]
 pub(crate) fn uniform_vector(length: usize) -> nalgebra::DVector<f64> {
-    use nalgebra::DVector;
     use rand::Rng;
 
     let mut rng = rand::thread_rng();
     DVector::from_fn(length, |_, _| -> f64 { rng.gen() })
 }
 
-#[deprecated]
-pub fn one_hot<const N: usize>(value: usize) -> NeuraVector<N, f64> {
-    let mut res = NeuraVector::default();
-    if value < N {
+pub fn one_hot(value: usize, categories: usize) -> DVector<f32> {
+    let mut res = DVector::from_element(categories, 0.0);
+    if value < categories {
         res[value] = 1.0;
     }
     res
 }
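
The relocated one_hot helper is what the examples above now import from the crate root; it builds a dynamically sized DVector<f32> with a single 1.0 at the given index. A quick usage sketch:

    use nalgebra::dvector;
    use neuramethyst::one_hot;

    let label = one_hot(2, 5); // five categories, class index 2 is hot
    assert_eq!(label, dvector![0.0f32, 0.0, 1.0, 0.0, 0.0]);

    // An out-of-range index yields an all-zero vector rather than panicking:
    assert_eq!(one_hot(7, 5), dvector![0.0f32, 0.0, 0.0, 0.0, 0.0]);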
