#include "net.h"

#include <cmath>   // std::exp, used by the sigmoid activation
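
// default constructor: builds a small 2-input / 2-output net with two
// hidden layers of three neurons each, then wires it up via CreateNet()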
Net::Net(){
    m_NumInputs          = 2;
    m_NumOutputs         = 2;
    m_NumHiddenLayers    = 2;
    m_NumNeuronsPerLayer = 3;

    CreateNet();
}
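
// constructs a net with the given topology and builds its layers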
Net::Net(int NumInputs,
         int NumOutputs,
         int NumHiddenLayers,
         int NumNeuronsPerLayer) : m_NumInputs(NumInputs),
                                   m_NumOutputs(NumOutputs),
                                   m_NumHiddenLayers(NumHiddenLayers),
                                   m_NumNeuronsPerLayer(NumNeuronsPerLayer)
{
    CreateNet();
}
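
// builds the layers: the first hidden layer is fed by the network inputs,
// each further hidden layer by the previous one, and the output layer by
// the last hidden layer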
void Net::CreateNet(){
    m_vecLayers.push_back(NeuronLayer(m_NumNeuronsPerLayer, m_NumInputs));

    if (m_NumHiddenLayers > 1){
        for (int i=0; i<m_NumHiddenLayers-1; i++)
            m_vecLayers.push_back(NeuronLayer(m_NumNeuronsPerLayer, m_NumNeuronsPerLayer));
    }

    m_vecLayers.push_back(NeuronLayer(m_NumOutputs, m_NumNeuronsPerLayer));
}
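
// changes the stored topology; only the member values are updated here, so
// CreateNet() must be called again to rebuild the layers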
void Net::ModNet(int inputs, int outputs, int hiddenl, int hiddennodes){
    m_NumInputs          = inputs;
    m_NumOutputs         = outputs;
    m_NumHiddenLayers    = hiddenl;
    m_NumNeuronsPerLayer = hiddennodes;
}
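
// flattens every weight in the net (layer by layer, neuron by neuron) into a
// single vector, in the same order that PutWeights() expects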
vector<float> Net::GetWeights(){
    vector<float> weights;

    for (int i=0; i<m_NumHiddenLayers + 1; i++){
        for (int j=0; j<m_vecLayers[i].m_NumNeurons; j++){
            for (int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_NumInputs; k++){
                weights.push_back(m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k]);
            }
        }
    }

    return weights;
}
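
// overwrites the net's weights with the supplied values; the vector must be
// laid out exactly as GetWeights() produces it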
void Net::PutWeights(vector<float> weights){
    int cWeight = 0;

    for (int i=0; i<m_NumHiddenLayers + 1; i++){
        for (int j=0; j<m_vecLayers[i].m_NumNeurons; j++){
            for (int k=0; k<m_vecLayers[i].m_vecNeurons[j].m_NumInputs; k++){
                m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k] = weights[cWeight++];
            }
        }
    }
}
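
// returns the total number of weights stored in the net (each neuron's count
// includes its bias weight)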
int Net::GetNumberOfWeights(){
    int weights = 0;

    for (int i=0; i<m_NumHiddenLayers + 1; i++){
        for (int j=0; j<m_vecLayers[i].m_NumNeurons; j++){
            weights += m_vecLayers[i].m_vecNeurons[j].m_NumInputs;
        }
    }

    return weights;
}
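
// feeds the inputs forward through every layer: each neuron forms the weighted
// sum of the previous layer's outputs plus a bias term (its last weight times
// BIAS) and squashes the result through the sigmoid activation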
vector<float> Net::Update(vector<float> inputs){
    vector<float> outputs;

    // bail out (returning an empty vector) if the wrong number of inputs was supplied
    if (inputs.size() != (size_t)m_NumInputs)
        return outputs;

    for (int i=0; i<m_NumHiddenLayers + 1; i++){
        // from the second layer onwards the previous layer's outputs become the inputs
        if (i > 0)
            inputs = outputs;

        outputs.clear();

        for (int j=0; j<m_vecLayers[i].m_NumNeurons; j++){
            float netinput  = 0.0f;
            int   NumInputs = m_vecLayers[i].m_vecNeurons[j].m_NumInputs;

            // weighted sum of the inputs; the last weight is reserved for the bias
            for (int k=0; k<NumInputs - 1; k++){
                netinput += m_vecLayers[i].m_vecNeurons[j].m_vecWeight[k] * inputs[k];
            }

            // add in the bias term
            netinput += m_vecLayers[i].m_vecNeurons[j].m_vecWeight[NumInputs-1] * BIAS;

            outputs.push_back(Sigmoid(netinput, ACTIVATION_RESPONSE));
        }
    }

    return outputs;
}
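
// sigmoid (logistic) activation: maps the net input into the range (0, 1);
// larger response values flatten the curve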
float Net::Sigmoid(float netinput, float response){
    return 1.0f / (1.0f + std::exp(-netinput / response));
}