🐛 Fix broken tests, add NeuraSequentialLast

main
Shad Amethyst 2 years ago
parent 72ffce457a
commit fdc906c220

@@ -1,4 +1,7 @@
use crate::network::residual::{NeuraAxisDefault, NeuraSplitInputs};
use crate::network::{
residual::{NeuraAxisDefault, NeuraSplitInputs},
sequential::NeuraSequentialLast,
};
use super::*;
@@ -10,9 +13,9 @@ pub trait FromSequential<Seq, Data> {
) -> Self;
}
impl<Data> FromSequential<(), Data> for NeuraGraph<Data> {
impl<Data> FromSequential<NeuraSequentialLast, Data> for NeuraGraph<Data> {
fn from_sequential_rec(
_seq: &(),
_seq: &NeuraSequentialLast,
nodes: Vec<NeuraGraphNodeConstructed<Data>>,
input_shape: NeuraShape,
) -> Self {

@@ -115,7 +115,6 @@ impl<
{
#[inline(always)]
fn output_shape(&self) -> NeuraShape {
todo!("output_shape for NeuraResidualNode is not yet ready");
self.child_network.output_shape()
}

@@ -2,17 +2,17 @@ use crate::err::NeuraRecursiveErr;
use super::*;
impl<Layer: NeuraPartialLayer> NeuraPartialLayer for NeuraSequential<Layer, ()> {
type Constructed = NeuraSequential<Layer::Constructed, ()>;
type Err = Layer::Err;
// impl<Layer: NeuraPartialLayer> NeuraPartialLayer for NeuraSequential<Layer, ()> {
// type Constructed = NeuraSequential<Layer::Constructed, ()>;
// type Err = Layer::Err;
fn construct(self, input_shape: NeuraShape) -> Result<Self::Constructed, Self::Err> {
Ok(NeuraSequential {
layer: self.layer.construct(input_shape)?,
child_network: Box::new(()),
})
}
}
// fn construct(self, input_shape: NeuraShape) -> Result<Self::Constructed, Self::Err> {
// Ok(NeuraSequential {
// layer: self.layer.construct(input_shape)?,
// child_network: Box::new(()),
// })
// }
// }
impl<Layer: NeuraPartialLayer, ChildNetwork: NeuraPartialLayer> NeuraPartialLayer
for NeuraSequential<Layer, ChildNetwork>

@@ -1,19 +1,11 @@
use super::*;
use crate::layer::{NeuraLayer, NeuraLayerBase};
// impl<Layer: NeuraLayerBase> NeuraLayerBase for NeuraSequential<Layer, ()> {
// #[inline(always)]
// fn output_shape(&self) -> NeuraShape {
// self.layer.output_shape()
// }
// }
impl<Layer: NeuraLayerBase, ChildNetwork: NeuraLayerBase> NeuraLayerBase
for NeuraSequential<Layer, ChildNetwork>
{
#[inline(always)]
fn output_shape(&self) -> NeuraShape {
todo!("Have output_shape return Option");
self.child_network.output_shape()
}

@@ -8,11 +8,11 @@ pub trait NeuraSequentialLock {
fn lock(self) -> Self::Locked;
}
impl NeuraSequentialLock for () {
type Locked = ();
impl NeuraSequentialLock for NeuraSequentialLast {
type Locked = NeuraSequentialLast;
fn lock(self) -> Self::Locked {
()
self
}
}

@@ -42,7 +42,7 @@ pub use tail::*;
/// instance.
///
/// The operations on the tail end are more complex, and require recursively traversing the `NeuraSequential` structure,
/// until an instance of `NeuraSequential<Layer, ()>` is found.
/// until an instance of `NeuraSequential<Layer, NeuraSequentialLast>` is found.
/// If your network feeds into a type that does not implement `NeuraSequentialTail`, then you will not be able to use those operations.
#[derive(Clone, Debug)]
pub struct NeuraSequential<Layer, ChildNetwork> {
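For illustration (not part of this commit): with the new terminator, a three-layer network nests as below, and the tail operations recurse through this structure until they hit the innermost child, which is now NeuraSequentialLast instead of (). LayerA, LayerB and LayerC are hypothetical placeholders for concrete layer types.

// Sketch of the nesting that NeuraSequentialTail recurses through.
type ThreeLayerNet<LayerA, LayerB, LayerC> = NeuraSequential<
    LayerA,
    NeuraSequential<LayerB, NeuraSequential<LayerC, NeuraSequentialLast>>,
>;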
@@ -76,11 +76,11 @@ impl<Layer, ChildNetwork> NeuraSequential<Layer, ChildNetwork> {
}
}
impl<Layer> From<Layer> for NeuraSequential<Layer, ()> {
impl<Layer> From<Layer> for NeuraSequential<Layer, NeuraSequentialLast> {
fn from(layer: Layer) -> Self {
Self {
layer,
child_network: Box::new(()),
child_network: Box::new(NeuraSequentialLast::default()),
}
}
}
@@ -154,7 +154,7 @@ where
#[macro_export]
macro_rules! neura_sequential {
[] => {
()
$crate::network::sequential::NeuraSequentialLast::default()
};
[ .. $network:expr $(,)? ] => {

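A rough sketch of the intended expansion, assuming the remaining macro arms (unchanged and not shown in this diff) keep wrapping each layer in NeuraSequential; layer_a and layer_b are hypothetical layer values.

// neura_sequential![] now expands to the terminator itself:
//     NeuraSequentialLast::default()
// and neura_sequential![layer_a, layer_b] should bottom out in it, roughly:
//     NeuraSequential {
//         layer: layer_a,
//         child_network: Box::new(NeuraSequential {
//             layer: layer_b,
//             child_network: Box::new(NeuraSequentialLast::default()),
//         }),
//     }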
@@ -1,5 +1,127 @@
use super::*;
/// Last element of a NeuraSequential network
#[derive(Clone, Debug, PartialEq, Copy)]
pub struct NeuraSequentialLast {
shape: Option<NeuraShape>,
}
impl NeuraPartialLayer for NeuraSequentialLast {
type Constructed = NeuraSequentialLast;
type Err = ();
fn construct(mut self, input_shape: NeuraShape) -> Result<Self::Constructed, Self::Err> {
self.shape = Some(input_shape);
Ok(self)
}
}
impl NeuraLayerBase for NeuraSequentialLast {
type Gradient = ();
#[inline(always)]
fn output_shape(&self) -> NeuraShape {
self.shape
.expect("Called NeuraSequentialLast::output_shape() without building it")
}
#[inline(always)]
fn default_gradient(&self) -> Self::Gradient {
()
}
}
impl<Input: Clone> NeuraLayer<Input> for NeuraSequentialLast {
type Output = Input;
type IntermediaryRepr = ();
#[inline(always)]
fn eval_training(&self, input: &Input) -> (Self::Output, Self::IntermediaryRepr) {
(input.clone(), ())
}
#[inline(always)]
fn backprop_layer(
&self,
_input: &Input,
_intermediary: &Self::IntermediaryRepr,
epsilon: &Self::Output,
) -> Input {
epsilon.clone()
}
}
impl NeuraNetworkBase for NeuraSequentialLast {
type Layer = ();
#[inline(always)]
fn get_layer(&self) -> &Self::Layer {
&()
}
}
impl NeuraNetworkRec for NeuraSequentialLast {
type NextNode = ();
#[inline(always)]
fn get_next(&self) -> &Self::NextNode {
&()
}
#[inline(always)]
fn merge_gradient(
&self,
rec_gradient: <Self::NextNode as NeuraLayerBase>::Gradient,
_layer_gradient: <Self::Layer as NeuraLayerBase>::Gradient,
) -> Self::Gradient
where
Self::Layer: NeuraLayerBase,
{
rec_gradient
}
}
impl<Input: Clone> NeuraNetwork<Input> for NeuraSequentialLast {
type LayerInput = Input;
type NodeOutput = Input;
fn map_input<'a>(&'_ self, input: &'a Input) -> Cow<'a, Self::LayerInput> {
Cow::Borrowed(input)
}
fn map_output<'a>(
&'_ self,
_input: &'_ Input,
layer_output: &'a Input,
) -> Cow<'a, Self::NodeOutput> {
Cow::Borrowed(layer_output)
}
fn map_gradient_in<'a>(
&'_ self,
_input: &'_ Input,
gradient_in: &'a Self::NodeOutput,
) -> Cow<'a, Input> {
Cow::Borrowed(gradient_in)
}
fn map_gradient_out<'a>(
&'_ self,
_input: &'_ Input,
_gradient_in: &'_ Self::NodeOutput,
gradient_out: &'a Self::LayerInput,
) -> Cow<'a, Input> {
Cow::Borrowed(gradient_out)
}
}
impl Default for NeuraSequentialLast {
fn default() -> Self {
Self { shape: None }
}
}
/// Operations on the tail end of a sequential network
pub trait NeuraSequentialTail {
type TailTrimmed;
@@ -10,13 +132,13 @@ pub trait NeuraSequentialTail {
}
// Trimming the last layer returns an empty network
impl<Layer> NeuraSequentialTail for NeuraSequential<Layer, ()> {
type TailTrimmed = ();
impl<Layer> NeuraSequentialTail for NeuraSequential<Layer, NeuraSequentialLast> {
type TailTrimmed = NeuraSequentialLast;
// GAT :3
type TailPushed<T> = NeuraSequential<Layer, NeuraSequential<T, ()>>;
type TailPushed<T> = NeuraSequential<Layer, NeuraSequential<T, NeuraSequentialLast>>;
fn trim_tail(self) -> Self::TailTrimmed {
()
NeuraSequentialLast::default()
}
fn push_tail<T>(self, layer: T) -> Self::TailPushed<T> {
@@ -24,7 +146,7 @@ impl<Layer> NeuraSequentialTail for NeuraSequential<Layer, ()> {
layer: self.layer,
child_network: Box::new(NeuraSequential {
layer,
child_network: Box::new(()),
child_network: Box::new(NeuraSequentialLast::default()),
}),
}
}

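As a rough usage sketch (not part of the commit), the new terminator behaves as an identity layer: the forward pass clones its input, the backward pass forwards the incoming gradient unchanged, and output_shape() only becomes meaningful after construct has recorded an input shape (it panics otherwise, per the expect above). The snippet assumes the NeuraLayer and NeuraLayerBase traits shown in this commit are in scope.

// Minimal sketch of the identity behaviour of NeuraSequentialLast.
let last = NeuraSequentialLast::default();

// Forward pass: identity on any `Clone` input.
let (out, _intermediary) = last.eval_training(&vec![1.0f32, 2.0, 3.0]);
assert_eq!(out, vec![1.0, 2.0, 3.0]);

// Backward pass: the gradient (`epsilon`) is passed straight through.
let grad = last.backprop_layer(&vec![1.0f32, 2.0, 3.0], &(), &vec![0.5f32, 0.25, 0.125]);
assert_eq!(grad, vec![0.5, 0.25, 0.125]);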