It's hard for me to produce a minimal example, because I suspect the problem is caused by something elsewhere in the code, but I believe I've included the relevant code below. I've stripped out the parts of the classes (and so on) that I don't think matter for the problem.
I have a neural network class that uses neurons:
Neuron
#include <array>
#include <cmath>
#include <cstddef>
#include <stdexcept>

template<std::size_t NumInputs>
class Neuron
{
public:
Neuron()
{
for(auto& i : m_inputValues)
i = 0;
for(auto& e : m_eligibilityTraces)
e = 0;
for(auto& w : m_weights)
w = 0;
m_biasWeight = 0;
m_biasEligibilityTrace = 0;
m_outputValue = 0;
}
void SetInputValue(const std::size_t index, const double value)
{
m_inputValues[index] = value;
}
void SetWeight(const std::size_t index, const double weight)
{
if(std::isnan(weight))
throw std::runtime_error("Shit! this is a nan bread");
m_weights[index] = weight;
}
void SetBiasWeight(const double weight)
{
m_biasWeight = weight;
}
double GetInputValue(const std::size_t index) const
{
return m_inputValues[index];
}
double GetWeight(const std::size_t index) const
{
return m_weights[index];
}
double GetBiasWeight() const
{
return m_biasWeight;
}
double CalculateOutput()
{
m_outputValue = 0; // reset the member (a local double here would shadow it and break GetOutput())
for(std::size_t i = 0; i < NumInputs; ++i)
{
m_outputValue += m_inputValues[i] * m_weights[i];
}
m_outputValue += 1.0 * m_biasWeight;
m_outputValue = sigmoid(m_outputValue);
return m_outputValue;
}
double GetOutput() const
{
return m_outputValue;
}
double GetEligibilityTrace(const std::size_t index) const
{
return m_eligibilityTraces[index];
}
void SetEligibilityTrace(const std::size_t index, const double eligibility)
{
m_eligibilityTraces[index] = eligibility;
}
void SetBiasEligibility(const double eligibility)
{
m_biasEligibilityTrace = eligibility;
}
double GetBiasEligibility() const
{
return m_biasEligibilityTrace;
}
private:
std::array<double,NumInputs> m_inputValues;
std::array<double,NumInputs> m_weights;
std::array<double,NumInputs> m_eligibilityTraces;
double m_biasWeight;
double m_biasEligibilityTrace;
double m_outputValue;
};
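The sigmoid helper used in CalculateOutput is omitted above; assume it is just a plain logistic function declared before the class, along these lines:
double sigmoid(const double x)
{
return 1.0 / (1.0 + std::exp(-x)); // standard logistic squashing to (0,1)
}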
Neural network
template<std::size_t NumInputs, std::size_t NumHidden, std::size_t NumOutputs>
class NeuralNetwork
{
public:
...
std::array<double,NumOutputs> FeedForward(const std::array<double,NumInputs>& inputValues)
{
for(auto& hiddenNeuron : m_hiddenNeurons)
{
for(std::size_t i = 0; i < NumInputs; ++i)
hiddenNeuron.SetInputValue(i,inputValues[i]);
hiddenNeuron.CalculateOutput();
}
std::array<double, NumOutputs> returnValue;
for(std::size_t h = 0; h < NumHidden; ++h)
{
auto hiddenOutput = m_hiddenNeurons[h].GetOutput();
for(std::size_t o = 0; o < NumOutputs; ++o)
m_outputNeurons[o].SetInputValue(h, hiddenOutput);
}
for(std::size_t o = 0; o < NumOutputs; ++o)
{
returnValue[o] = m_outputNeurons[o].CalculateOutput();
}
return returnValue;
}
private:
std::array<Neuron<NumInputs>,NumHidden> m_hiddenNeurons;
std::array<Neuron<NumHidden>,NumOutputs> m_outputNeurons;
};
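For context, FeedForward is simply called with the full input array, as in this minimal illustrative wrapper (Evaluate is a made-up name here, and the excerpt does not show where the network object itself is created):
std::array<double,2> Evaluate(NeuralNetwork<170,170,2>& net, const std::array<double,170>& inputs)
{
return net.FeedForward(inputs); // one forward pass: 170 inputs -> 170 hidden neurons -> 2 outputs
}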
Everything works fine for NeuralNetwork<86,86,2>, but after deciding that I need some more input variables, i.e. NeuralNetwork<170,170,2>, the FeedForward method causes a stack overflow when the -O2 compiler flag is turned on. The problem does not occur when I compile with -g instead.
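For scale, a rough check (ignoring whatever members I stripped from the classes above): each Neuron<170> holds three std::array<double,170>, so a single NeuralNetwork<170,170,2> object is on the order of 700 KB, compared to roughly 180 KB for NeuralNetwork<86,86,2>:
#include <iostream>

int main()
{
std::cout << sizeof(Neuron<170>) << '\n'; // roughly 4 KB per neuron
std::cout << sizeof(NeuralNetwork<170,170,2>) << '\n'; // roughly 700 KB in total
std::cout << sizeof(NeuralNetwork<86,86,2>) << '\n'; // roughly 180 KB in total
}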
If I remove this section from the FeedForward method, I do not get a stack overflow:
for(std::size_t h = 0; h < NumHidden; ++h)
{
auto hiddenOutput = m_hiddenNeurons[h].GetOutput();
for(std::size_t o = 0; o < NumOutputs; ++o)
m_outputNeurons[o].SetInputValue(h, hiddenOutput);
}
As you can see, NumHidden is 170 and NumOutputs is 2, so that loop only copies 170 doubles, one per hidden neuron, into the input slots of the two output neurons. In Neuron, GetOutput() just returns a stored double and SetInputValue() just writes a double into a member array; nothing there is recursive and nothing large is passed around by value. I have tried to narrow it down further, e.g. with this:
for(std::size_t h = 0; h < NumHidden; ++h)
{
auto hiddenOutput = m_hiddenNeurons[h].GetOutput();
}
for(std::size_t o = 0; o < NumOutputs; ++o)
{
returnValue[o] = m_outputNeurons[o].CalculateOutput();
}
and with this:
for(std::size_t h = 0; h < NumHidden; ++h)
{
auto hiddenOutput = m_hiddenNeurons[h].GetOutput();
}
for(std::size_t o = 0; o < NumOutputs; ++o)
{
}
but I still can't see what is going on here...