package Neuron;
use strict;
use Carp;
#######################################################################
#
# Class: Neuron
#
# A neuron has an internal state (V), a set of parent
# nodes (PARENTS), a set of weights connected with the
# parent nodes (W), a set of child nodes (CHILDREN),
# and a delta (DELTA) which is computed during backpropagation
# and is used by its parent nodes for weight correction.
#
#######################################################################
#######################################################################
#
# Package (class) data
#
# $beta is a common variable representing the 'harshness' of
# the sigmoid function to input data. $beta is set by the
# application using the neural network.
#
#######################################################################
my $beta;
sub beta
{
if (@_) { $beta = shift; }
return $beta;
}
#
# Sigmoid function
#
sub g
{
my ($h) = @_;
return 1.0 / ( 1.0 + exp( -2.0 * $beta * $h ) );
}
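#
# For example, with $beta = 0.8 (the value used by the test script
# at the end of this listing), g(0) = 0.5 and
# g(1) = 1/(1 + exp(-1.6)), roughly 0.83.  Larger values of $beta
# make the transition from 0 to 1 sharper.
#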
#######################################################################
#
# Instance (object) data
#
# new(): creates a new Neuron.
# V(): accessor for the neuron's state.
# delta(): accessor for the neuron's delta.
# parents(): read-only accessor to return list of parent nodes.
# weights(): read-only accessor to return list of weights.
# children(): read-only accessor to return list of children.
# addChild(): adds a child node.
# toString(): returns basic data about the neuron in a String.
#
# calculateValue(): calculates state (V) of neuron using
# forward propagation.
#
#######################################################################
# constructor
sub new
{
my $proto = shift;
my $class = ref( $proto ) || $proto;
my $self = {};
$self->{V} = 0.0;
$self->{DELTA} = 0.0;
$self->{PARENTS} = [];
$self->{W} = [];
$self->{CHILDREN} = [];
#
# Allow methods to be called on this object.
#
bless( $self, $class );
return $self;
}
sub V
{
my $self = shift;
if (@_) { $self->{V} = shift; }
return $self->{V};
}
sub delta
{
my $self = shift;
if (@_) { $self->{DELTA} = shift; }
return $self->{DELTA};
}
sub parents
{
my $self = shift;
return $self->{PARENTS};
}
sub weights
{
my $self = shift;
return $self->{W};
}
sub children
{
my $self = shift;
return $self->{CHILDREN};
}
sub addChild
{
my $self = shift;
my $child = shift;
push( @{$self->{CHILDREN}}, $child );
push( @{$child->{PARENTS}}, $self );
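#
# the connecting weight is stored on the child, at the same index
# as this parent in the child's PARENTS list; start it at a
# random value in the range [-1, 1)
#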
push( @{$child->{W}}, 2 * (rand() - 0.5) );
}
sub toString
{
my $self = shift;
return "V : " . $self->V . "\n"
. "delta : " . $self->delta . "\n"
. "parents : " . scalar( @{$self->parents} ) . "\n"
. "children: " . scalar( @{$self->children} ) . "\n"
. "weights : " . join( " ", @{$self->weights} ) . "\n"
;
}
#
# Calculates V based on parents' V and weights.
#
sub calculateValue
{
my $self = shift;
my $h = 0.0;
#print "parents: ", scalar @{$self->parents}, "\n";
for ( my $i=0; $i<@{$self->parents}; $i++ )
{
my $parentRef = ${$self->parents}[$i];
$h += $parentRef->V * ${$self->weights}[$i];
#print "parent V=", $parentRef->V, "\n";
#print "h=$h\n";
}
#
# Here's the sigmoid.
#
$self->V( &g($h) );
#print "my V=", $self->V, "\n";
}
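#
# calculateDelta / updateWeights: the class comment above says
# DELTA is computed during backpropagation and used for weight
# correction, but the original listing stops short of that code.
# What follows is a minimal, hypothetical sketch of those two
# steps (the method names and the learning rate $eta are not part
# of the original module).  It relies on the fact that for
# g(h) = 1/(1+exp(-2*beta*h)) the derivative is
# g'(h) = 2*beta*g(h)*(1-g(h)), i.e. 2*beta*V*(1-V).
#
sub calculateDelta
{
my $self = shift;
my $target = shift;   # pass a target value for output nodes only
my $error = 0.0;
if ( defined $target )
{
# output node: error is simply (target - actual)
$error = $target - $self->V;
}
else
{
# hidden node: error is the weighted sum of the children's
# deltas; each child stores the weight of its connection to
# this parent at the matching index of its own W list
foreach my $child ( @{$self->children} )
{
for ( my $i=0; $i<@{$child->parents}; $i++ )
{
next unless ${$child->parents}[$i] == $self;
$error += ${$child->weights}[$i] * $child->delta;
}
}
}
$self->delta( 2.0 * $beta * $self->V * (1.0 - $self->V) * $error );
}
#
# Each neuron owns the weights of its incoming connections, so it
# corrects them from its own delta and its parents' values.
#
sub updateWeights
{
my $self = shift;
my $eta = shift;   # learning rate, chosen by the caller
for ( my $i=0; $i<@{$self->parents}; $i++ )
{
${$self->weights}[$i] += $eta * $self->delta * ${$self->parents}[$i]->V;
}
}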
1; # so the require or use succeeds
####
package NeuralNet;
use strict;
use Carp;
require "Neuron.pm";
#######################################################################
#
# Class: NeuralNet
#
# Representation of a neural network. This class directly
# manages layers (LAYERS) of nodes (i.e. Neurons).
#
#######################################################################
# constructor
sub new
{
my $proto = shift;
my $class = ref( $proto ) || $proto;
my $self = {};
$self->{LAYERS} = [];
#
# Allow methods to be called on this object.
#
bless( $self, $class );
return $self;
}
#
# layers: read-only method that returns a list of all layers.
#
sub layers
{
my $self = shift;
return @{ $self->{LAYERS} };
}
#
# numLayers: read-only method that returns the number of layers.
#
sub numLayers
{
my $self = shift;
return scalar @{$self->{LAYERS}};
}
#
# layer: returns the nodes in the n-th (zero-based) layer.
#
sub layer
{
my $self = shift;
my $num = shift;
my @layers = $self->layers;
return @{$layers[$num]};
}
#
# lastLayer: returns the nodes in the output (last) layer, or an
# empty list if the network has no layers yet.
#
sub lastLayer
{
my $self = shift;
my @layers = $self->layers;
return () unless @layers;
return @{$layers[$#layers]};
}
#
# addLayer: Adds a reference to a layer of nodes.
#
# addLayer( number of nodes in layer )
#
sub addLayer
{
my $self = shift;
my $nodeCount = shift;
my @lastlayer = $self->lastLayer;
my @newlayer = ( 1..$nodeCount );
#
# create children
#
foreach (@newlayer)
{
$_ = Neuron->new();
}
#
# hook them up with the previous layer
# (if there is one)
#
foreach my $parent (@lastlayer)
{
foreach my $child (@newlayer)
{
$parent->addChild( $child );
}
}
#
# add them to the network
#
push( @{$self->{LAYERS}}, \@newlayer );
}
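#
# feedForward: a hypothetical convenience sketch, not part of the
# original listing, that runs forward propagation through the
# network by calling calculateValue on every node after the input
# layer.  The caller is expected to have set V on the input-layer
# nodes first.
#
sub feedForward
{
my $self = shift;
for ( my $i=1; $i<$self->numLayers; $i++ )
{
foreach my $node ( $self->layer($i) )
{
$node->calculateValue();
}
}
}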
1;
####
require "Neuron.pm";
require "NeuralNet.pm";
Neuron::beta( 0.8 );
$parent = Neuron->new();
$neuron = Neuron->new();
$child = Neuron->new();
$parent->addChild( $neuron );
$neuron->addChild( $child );
$parent->V(1);
$neuron->calculateValue();
print $neuron->toString();
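#
# A hypothetical end-to-end sketch that exercises NeuralNet as well
# (the test above only uses Neuron directly).  The layer sizes and
# input values are arbitrary; the forward pass below is the same
# loop as the feedForward sketch in NeuralNet.pm.
#
my $net = NeuralNet->new();
$net->addLayer( 2 );   # input layer
$net->addLayer( 3 );   # hidden layer
$net->addLayer( 1 );   # output layer
#
# clamp the input layer's values
#
my @inputs = $net->layer(0);
$inputs[0]->V( 1 );
$inputs[1]->V( 0 );
#
# propagate through the remaining layers and print the output node
#
for ( my $i=1; $i<$net->numLayers; $i++ )
{
$_->calculateValue() foreach $net->layer($i);
}
print $_->toString() foreach $net->lastLayer;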