Main Page   Alphabetical List   Compound List   File List   Compound Members   File Members  

nkern_net.cpp

Go to the documentation of this file.
00001 
00002 // Name:        nkern_net.cpp
00003 // Purpose:     nkernel network code
00004 // Author:      Chad Rempp
00005 // Modified by: Chad Rempp
00006 // Created:     03-01-02
00007 // Copyright:   (c) 2003 Chad Rempp
00008 // Licence:     GNU General Public License (hey it's what all my friends are doing)
00010 
00011 #include "nkernel/nkern_net.h"
00012 
00013 // *****HACK***** I took this out because it was causing problems for the moment
00014 //extern SimData_typ g_SimData;
00015 
00017 // Net class Implementation
00019 
00021 Net::Net(){
00022                    m_NumInputs          = 2; //g_SimData.NumInputs;
00023                    m_NumOutputs         = 2; //g_SimData.NumOutputs;
00024                    m_NumHiddenLayers    = 2; //g_SimData.NumHiddenLayers;
00025                    m_NumNeuronsPerLayer = 3; //g_SimData.NeuronsPerHLayer;
00026 
00027                    CreateNet();
00028 
00029 }
00030 
00032 Net::Net( int NumInputs,
00033                   int NumOutputs,
00034                   int NumHiddenLayers,
00035                   int NumNeuronsPerLayer) : m_NumInputs(NumInputs),
00036                                             m_NumOutputs(NumOutputs),
00037                                             m_NumHiddenLayers(NumHiddenLayers),
00038                                             m_NumNeuronsPerLayer(NumNeuronsPerLayer)
00039 
00040 {
00041      CreateNet();
00042 }
00043 
00045 void Net::CreateNet(){
00046      m_vecLayers.push_back(NeuronLayer(m_NumNeuronsPerLayer, m_NumInputs));
00047      if (m_NumHiddenLayers > 1){
00048         for (int i=0; i<m_NumHiddenLayers-1; i++)
00049             m_vecLayers.push_back(NeuronLayer(m_NumNeuronsPerLayer, m_NumNeuronsPerLayer));
00050      }
00051 
00052      m_vecLayers.push_back(NeuronLayer(m_NumOutputs, m_NumNeuronsPerLayer));
00053 
00054 }
00055 
00057 void Net::ModNet(int inputs, int outputs, int hiddenl, int hiddennodes){
00058     m_NumInputs          = inputs; 
00059     m_NumOutputs         = outputs; 
00060     m_NumHiddenLayers    = hiddenl;
00061     m_NumNeuronsPerLayer = hiddennodes;
00062 }
00063 
00065 vector<float> Net::GetWeights(){
00066      vector<float> weights;
00067      for (int i=0; i<m_NumHiddenLayers + 1; i++){
00068          for (int j=0; j<m_vecLayers[j].m_NumNeurons; j++){
00069              for (int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_NumInputs; k++){
00070                  weights.push_back(m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k]);
00071              }
00072          }
00073      }
00074      return weights;
00075 }
00076 
00078 void Net::PutWeights(vector<float> weights){
00079      int cWeight = 0;
00080          for (int i=0; i<m_NumHiddenLayers + 1; i++){
00081              for (int j=0; j<m_vecLayers[i].m_NumNeurons; j++){
00082                  for (int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_NumInputs; k++){
00083                      m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k] = weights[cWeight++];
00084                  }
00085              }
00086          }
00087      return;
00088 }
00089 
00091 int Net::GetNumberOfWeights(){
00092     int weights = 0;
00093     for (int i=0; i<m_NumHiddenLayers +1; i++){
00094         for (int j=0; j<m_vecLayers[i].m_NumNeurons; j++){
00095             for (int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_NumInputs; k++)
00096                 weights++;
00097         }
00098     }
00099     return weights;
00100 }
00101 
// Feed one set of input values forward through every layer and return
// the resulting outputs.  Returns an EMPTY vector if 'inputs' does not
// match m_NumInputs — callers must check.
// NOTE(review): cWeight is reset to 0 after each neuron, so every neuron
// in a layer reads inputs[0 .. NumInputs-2] (i.e. each neuron sees the
// whole input vector); weight index NumInputs-1 is the bias weight,
// multiplied by BIAS rather than by an input value.
vector<float> Net::Update(vector<float> inputs){
   vector<float> outputs;
   int cWeight = 0;

   // Reject a malformed input vector up front.
   if (int(inputs.size()) != m_NumInputs)
      return outputs;

   // One pass per layer: hidden layers plus the output layer.
   for (int i=0; i<m_NumHiddenLayers +1; i++){
       // After the first layer, the previous layer's outputs become
       // this layer's inputs.
       if ( i > 0 )
          inputs = outputs;

       outputs.clear();

       cWeight = 0;

       for (int j=0; j<m_vecLayers[i].m_NumNeurons; j++){
           float netinput = 0.0f;
           int NumInputs = m_vecLayers[i].m_vecNeurons[j].m_NumInputs;

           // Weighted sum of the inputs (last weight slot is the bias,
           // handled below).
           for (int k=0; k<NumInputs - 1; k++){
               netinput += m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k] * inputs[cWeight++];
           }

           // Add the bias contribution.
           netinput += m_vecLayers[i].m_vecNeurons[j].m_vecWeight[NumInputs-1] * BIAS;

           // Squash through the sigmoid activation.
           outputs.push_back(Sigmoid (netinput, ACTIVATION_RESPONSE));

           // Restart at the first input for the next neuron.
           cWeight = 0;

       }
   }
   return outputs;
}
00136 
00138 float Net::Sigmoid(float netinput, float response){
00139       return (1 / ( 1 + exp(-netinput / response)));
00140 }

Generated on Mon Jun 23 23:09:51 2003 for NeReK Documentation by doxygen 1.2.14 written by Dimitri van Heesch, © 1997-2002