Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
276 changes: 276 additions & 0 deletions src/NeuralNet/FeedForwards/FeedForward.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,276 @@
<?php

namespace Rubix\ML\NeuralNet\FeedForwards;

use NDArray;
use NumPower;
use Rubix\ML\NeuralNet\Layers\Base\Contracts\Hidden;
use Rubix\ML\NeuralNet\Layers\Base\Contracts\Input;
use Rubix\ML\NeuralNet\Layers\Base\Contracts\Layer;
use Rubix\ML\NeuralNet\Layers\Base\Contracts\Output;
use Rubix\ML\NeuralNet\Layers\Base\Contracts\Parametric;
use Rubix\ML\Encoding;
use Rubix\ML\Datasets\Dataset;
use Rubix\ML\Datasets\Labeled;
use Rubix\ML\NeuralNet\Networks\Network;
use Rubix\ML\NeuralNet\Optimizers\Base\Adaptive;
use Rubix\ML\NeuralNet\Optimizers\Base\Optimizer;
use Traversable;

use function array_reverse;

/**
* Feed Forward
*
* A feed forward neural network implementation consisting of an input and
* output layer and any number of intermediate hidden layers.
*
* @internal
*
* @category Machine Learning
* @package Rubix/ML
* @author Andrew DalPino
* @author Samuel Akopyan <leumas.a@gmail.com>
*/
class FeedForward extends Network
{
    /**
     * The layer that receives the network's raw input.
     *
     * @var Input
     */
    protected Input $input;

    /**
     * The intermediate layers, ordered from the input side to the output side.
     *
     * @var list<Hidden>
     */
    protected array $hidden = [
        //
    ];

    /**
     * The hidden layers in reverse order, cached for the backward pass.
     *
     * @var list<Hidden>
     */
    protected array $backPass = [
        //
    ];

    /**
     * The final layer that produces the network's activations.
     *
     * @var Output
     */
    protected Output $output;

    /**
     * The gradient descent optimizer used to update the network parameters.
     *
     * @var Optimizer
     */
    protected Optimizer $optimizer;

    /**
     * @param Input $input
     * @param Hidden[] $hidden
     * @param Output $output
     * @param Optimizer $optimizer
     */
    public function __construct(Input $input, array $hidden, Output $output, Optimizer $optimizer)
    {
        // Re-index so the hidden layers form a proper list.
        $layers = array_values($hidden);

        $this->input = $input;
        $this->hidden = $layers;
        $this->output = $output;
        $this->optimizer = $optimizer;

        // Cache the reversed ordering used by backpropagate().
        $this->backPass = array_reverse($layers);
    }

    /**
     * Return the input layer.
     *
     * @return Input
     */
    public function input() : Input
    {
        return $this->input;
    }

    /**
     * Return the hidden layers indexed left to right.
     *
     * @return list<Hidden>
     */
    public function hidden() : array
    {
        return $this->hidden;
    }

    /**
     * Return the output layer.
     *
     * @return Output
     */
    public function output() : Output
    {
        return $this->output;
    }

    /**
     * Yield every layer of the network in forward-pass order.
     *
     * @return Traversable<Layer>
     */
    public function layers() : Traversable
    {
        yield $this->input;

        yield from $this->hidden;

        yield $this->output;
    }

    /**
     * Count the trainable parameters across all parametric layers.
     *
     * @return int
     */
    public function numParams() : int
    {
        $total = 0;

        foreach ($this->layers() as $layer) {
            if (!$layer instanceof Parametric) {
                continue;
            }

            foreach ($layer->parameters() as $parameter) {
                $total += $parameter->param()->size();
            }
        }

        return $total;
    }

    /**
     * Initialize the parameters of the layers and warm the optimizer cache.
     */
    public function initialize() : void
    {
        // Each layer reports its fan out, which becomes the next layer's fan in.
        $fanIn = 1;

        foreach ($this->layers() as $layer) {
            $fanIn = $layer->initialize($fanIn);
        }

        if (!$this->optimizer instanceof Adaptive) {
            return;
        }

        // Adaptive optimizers pre-allocate per-parameter state up front.
        foreach ($this->layers() as $layer) {
            if ($layer instanceof Parametric) {
                foreach ($layer->parameters() as $param) {
                    $this->optimizer->warm($param);
                }
            }
        }
    }

    /**
     * Run an inference pass and return the activations at the output layer.
     *
     * @param Dataset $dataset
     * @return NDArray
     */
    public function infer(Dataset $dataset) : NDArray
    {
        // Samples arrive row-major; transpose to the column-major layout the layers expect.
        $activations = NumPower::transpose(NumPower::array($dataset->samples()), [1, 0]);

        foreach ($this->layers() as $layer) {
            $activations = $layer->infer($activations);
        }

        // Transpose back so each row corresponds to a sample again.
        return NumPower::transpose($activations, [1, 0]);
    }

    /**
     * Perform a forward and backward pass of the network in one call. Returns
     * the loss from the backward pass.
     *
     * @param Labeled $dataset
     * @return float
     */
    public function roundtrip(Labeled $dataset) : float
    {
        $input = NumPower::transpose(NumPower::array($dataset->samples()), [1, 0]);

        $this->feed($input);

        return $this->backpropagate($dataset->labels());
    }

    /**
     * Feed a batch through the network and return a matrix of activations at
     * the output layer.
     *
     * @param NDArray $input
     * @return NDArray
     */
    public function feed(NDArray $input) : NDArray
    {
        $activations = $input;

        foreach ($this->layers() as $layer) {
            $activations = $layer->forward($activations);
        }

        return $activations;
    }

    /**
     * Backpropagate the gradient of the cost function and return the loss.
     *
     * @param list<string|int|float> $labels
     * @return float
     */
    public function backpropagate(array $labels) : float
    {
        // The output layer seeds the gradient and computes the loss.
        [$delta, $loss] = $this->output->back($labels, $this->optimizer);

        // Propagate the gradient through the hidden layers in reverse order.
        foreach ($this->backPass as $layer) {
            $delta = $layer->back($delta, $this->optimizer);
        }

        return $loss;
    }

    /**
     * Export the network architecture as a graph in dot format.
     *
     * @return Encoding
     */
    public function exportGraphviz() : Encoding
    {
        $dot = 'digraph Tree {' . PHP_EOL;
        $dot .= ' node [shape=box, fontname=helvetica];' . PHP_EOL;

        $id = 0;

        foreach ($this->layers() as $layer) {
            ++$id;

            $dot .= " N$id [label=\"$layer\",style=\"rounded\"]" . PHP_EOL;

            // Chain each layer to its predecessor to depict the forward path.
            if ($id > 1) {
                $prev = $id - 1;

                $dot .= " N{$prev} -> N{$id};" . PHP_EOL;
            }
        }

        $dot .= '}';

        return new Encoding($dot);
    }
}
2 changes: 1 addition & 1 deletion src/NeuralNet/Layers/Base/Contracts/Layer.php
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
use Stringable;

/**
* Hidden
* Layer
*
* @category Machine Learning
* @package Rubix/ML
Expand Down
12 changes: 7 additions & 5 deletions src/NeuralNet/Layers/Swish/Swish.php
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
use Rubix\ML\NeuralNet\Parameters\Parameter;
use Generator;

use const Rubix\ML\EPSILON;

/**
* Swish
*
Expand Down Expand Up @@ -266,6 +268,7 @@ protected function activate(NDArray $input) : NDArray

/**
* Calculate the derivative of the activation function at a given output.
* Formulation: derivative = (output / input) * (1 - output) + output
*
* @param NDArray $input
* @param NDArray $output
Expand All @@ -278,12 +281,11 @@ protected function differentiate(NDArray $input, NDArray $output) : NDArray
throw new RuntimeException('Layer has not been initialized.');
}

// Original formulation:
// derivative = (output / input) * (1 - output) + output
// Implemented using NumPower operations to avoid explicit ones matrix.
$term1 = NumPower::divide($output, $input);
$oneMinusOutput = NumPower::subtract(1.0, $output);
// Prevent division by zero if the input contains zero values
$denominator = NumPower::add($input, EPSILON);
$term1 = NumPower::divide($output, $denominator);

$oneMinusOutput = NumPower::subtract(1.0, $output);
$product = NumPower::multiply($term1, $oneMinusOutput);

return NumPower::add($product, $output);
Expand Down
Loading
Loading