Skip to content

Commit

Permalink
Optimize Individual::forward_pass (use fold)
Browse files Browse the repository at this point in the history
  • Loading branch information
daniil-berg committed Dec 11, 2023
1 parent 3bda6d5 commit afce949
Showing 1 changed file with 4 additions and 8 deletions.
12 changes: 4 additions & 8 deletions src/individual.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,13 +75,7 @@ impl<T: Tensor> Individual<T> {
/// # Returns
/// Output tensor from the last layer
pub fn forward_pass(&self, input: &T) -> T {
let mut _weighted_input: T;
let mut output: T = input.clone();
// TODO: Consider replacing this loop with `Iterator::fold`.
for layer in (self.layers).iter() {
(_weighted_input, output) = layer.feed_forward(&output);
}
return output;
self.layers.iter().fold(input.clone(), |output, layer| layer.feed_forward(&output).1)
}

/// Passes the `input` through the network and returns the intermediate results of each layer.
Expand All @@ -99,8 +93,10 @@ impl<T: Tensor> Individual<T> {
let mut activations = Vec::<T>::with_capacity(num_layers);
let mut activation: T = input.clone();
let mut weighted_input: T;
// TODO: See if we actually need this first item in `backprop` (below).
// Consider replacing this loop with `Iterator` methods.
// https://github.com/mfajnberg/tensorevo/issues/21
activations.push(activation.clone());
// TODO: Consider replacing this loop with `Iterator::map` and `collect`.
for layer in &self.layers {
(weighted_input, activation) = layer.feed_forward(&activation);
weighted_inputs.push(weighted_input);
Expand Down

0 comments on commit afce949

Please sign in to comment.