neuron.cpp
1 // Copyright 2008 Google Inc.
2 // All Rights Reserved.
3 // Author: ahmadab@google.com (Ahmad Abdulkader)
4 //
5 // neuron.cpp: The implementation of a class for an object
6 // that represents a single neuron in a neural network
7 
8 #include "neuron.h"
9 #include "input_file_buffer.h"
10 
11 namespace tesseract {
12 
13 // Instantiate all supported templates
14 template bool Neuron::ReadBinary(InputFileBuffer *input_buffer);
15 
16 // default and only constructor
17 Neuron::Neuron() {
18   Init();
19 }
20
21 // virtual destructor
22 Neuron::~Neuron() {
23 }
24 
25 // Initializer
26 void Neuron::Init() {
27   id_ = -1;
28   frwd_dirty_ = false;
29   fan_in_.clear();
30   fan_in_weights_.clear();
31   activation_ = 0.0f;
32   output_ = 0.0f;
33   bias_ = 0.0f;
34   node_type_ = Unknown;
35 }
36 
37 // Computes the activation and output of the neuron if not fresh
38 // by pulling the outputs of all fan-in neurons
39 void Neuron::FeedForward() {
40   if (!frwd_dirty_) {
41     return;
42   }
43   // nothing to do for input nodes: just pass the input to the o/p
44   // otherwise, pull the output of all fan-in neurons
45   if (node_type_ != Input) {
46     int fan_in_cnt = fan_in_.size();
47     // sum out the activation
48     activation_ = -bias_;
49     for (int in = 0; in < fan_in_cnt; in++) {
50       if (fan_in_[in]->frwd_dirty_) {
51         fan_in_[in]->FeedForward();
52       }
53       activation_ += ((*(fan_in_weights_[in])) * fan_in_[in]->output_);
54     }
55     // sigmoid it
56     output_ = Sigmoid(activation_);
57   }
58   frwd_dirty_ = false;
59 }
60 
61 // set the type of the neuron
62 void Neuron::set_node_type(NeuronTypes Type) {
63   node_type_ = Type;
64 }
65 
66 // Adds new connections *to* this neuron *from*
67 // a target neuron using specified params
68 // Note that what is actually copied in this function are pointers to the
69 // specified Neurons and weights and not the actual values. This is by
70 // design to centralize the allocation of neurons and weights and so
71 // increase the locality of reference and improve cache-hits resulting
72 // in a faster net. This technique resulted in a 2X-10X speedup
73 // (depending on network size and processor)
74 void Neuron::AddFromConnection(Neuron *neurons,
75                                float *wts_offset,
76                                int from_cnt) {
77   for (int in = 0; in < from_cnt; in++) {
78     fan_in_.push_back(neurons + in);
79     fan_in_weights_.push_back(wts_offset + in);
80   }
81 }
82 
83 // fast computation of sigmoid function using a lookup table
84 // defined in sigmoid_table.cpp
85 float Neuron::Sigmoid(float activation) {
86   if (activation <= -10.0f) {
87     return 0.0f;
88   } else if (activation >= 10.0f) {
89     return 1.0f;
90   } else {
91     return kSigmoidTable[static_cast<int>(100 * (activation + 10.0))];
92   }
93 }
94 }
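Line 14 of the listing is an explicit instantiation of the member function template Neuron::ReadBinary for InputFileBuffer; it forces the compiler to emit that specialization in this translation unit. The following minimal, self-contained sketch illustrates the language mechanism only; Reader and FileBuffer are hypothetical names, not types from this codebase.

#include <cstdio>

struct FileBuffer {};  // hypothetical stand-in for InputFileBuffer

class Reader {
 public:
  // Member function template, analogous to Neuron::ReadBinary<BuffType>.
  template <class BuffType>
  bool ReadBinary(BuffType *buffer) {
    return buffer != nullptr;  // trivial body, just for the sketch
  }
};

// Explicit instantiation: emits Reader::ReadBinary<FileBuffer> in this
// translation unit, the same mechanism used on line 14 for InputFileBuffer.
template bool Reader::ReadBinary(FileBuffer *buffer);

int main() {
  Reader r;
  FileBuffer fb;
  std::printf("%d\n", static_cast<int>(r.ReadBinary(&fb)));
  return 0;
}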
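FeedForward() is demand-driven: a neuron recomputes only while frwd_dirty_ is set, recursively pulls any dirty fan-in neuron first, and accumulates activation_ = -bias_ plus the sum of weight * fan-in output before applying the sigmoid. AddFromConnection() stores pointers into neuron and weight arrays allocated elsewhere, which is the locality optimization described in the comment above it. Below is a minimal, self-contained sketch of that pattern; Node and Pull are hypothetical names, and the sketch omits the sigmoid and node types, so it is not the Neuron API of this codebase.

#include <cstdio>
#include <vector>

// Stand-in for Neuron: weights are *pointers* into a contiguous array owned
// by the network, mirroring what AddFromConnection() copies.
struct Node {
  std::vector<Node *> fan_in;    // pointers into the network's node array
  std::vector<float *> weights;  // pointers into the network's weight array
  float bias = 0.0f;
  float output = 0.0f;
  bool dirty = true;

  // Demand-driven evaluation: recompute only if dirty, pulling dirty
  // fan-in nodes first (the same idea as Neuron::FeedForward()).
  void Pull() {
    if (!dirty) return;
    if (!fan_in.empty()) {
      float activation = -bias;
      int fan_in_cnt = fan_in.size();
      for (int in = 0; in < fan_in_cnt; in++) {
        fan_in[in]->Pull();
        activation += (*weights[in]) * fan_in[in]->output;
      }
      output = activation;  // the real code applies Sigmoid() here
    }
    dirty = false;
  }
};

int main() {
  // Centralized allocation: one flat weight array shared by all nodes.
  std::vector<Node> nodes(3);  // nodes[0..1] inputs, nodes[2] output
  std::vector<float> weights = {0.5f, -0.25f};

  nodes[0].output = 1.0f; nodes[0].dirty = false;  // input nodes just hold
  nodes[1].output = 2.0f; nodes[1].dirty = false;  // their preset outputs

  // Wire connections by pointer, as AddFromConnection() does.
  for (int in = 0; in < 2; in++) {
    nodes[2].fan_in.push_back(&nodes[in]);
    nodes[2].weights.push_back(&weights[in]);
  }

  nodes[2].Pull();
  std::printf("output = %f\n", nodes[2].output);  // 0.5*1.0 + (-0.25)*2.0 = 0.0
  return 0;
}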
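Sigmoid() clamps activations outside [-10, 10] and otherwise indexes kSigmoidTable with static_cast<int>(100 * (activation + 10.0)), i.e. the table samples the activation range [-10, 10] at a step of 0.01, so in-range values map to indices 0 through 1999. The table itself lives in sigmoid_table.cpp; assuming it holds the standard logistic 1 / (1 + e^-x) (its exact size and precision are not shown here), a compatible table could be generated like this:

#include <cmath>
#include <cstdio>
#include <vector>

// Build a lookup table sampling the logistic function on [-10, 10] at a
// step of 0.01, matching the indexing used by Neuron::Sigmoid():
//   index = static_cast<int>(100 * (activation + 10.0))
std::vector<float> BuildSigmoidTable() {
  std::vector<float> table(2001);  // indices 0..2000 cover -10.00 .. +10.00
  for (int i = 0; i < static_cast<int>(table.size()); i++) {
    float x = -10.0f + 0.01f * i;
    table[i] = 1.0f / (1.0f + std::exp(-x));
  }
  return table;
}

// Table-based sigmoid with the same clamping as Neuron::Sigmoid().
float TableSigmoid(const std::vector<float> &table, float activation) {
  if (activation <= -10.0f) return 0.0f;
  if (activation >= 10.0f) return 1.0f;
  return table[static_cast<int>(100 * (activation + 10.0))];
}

int main() {
  std::vector<float> table = BuildSigmoidTable();
  std::printf("sigmoid(0.0) ~ %f\n", TableSigmoid(table, 0.0f));  // 0.5
  std::printf("sigmoid(2.0) ~ %f\n", TableSigmoid(table, 2.0f));  // ~0.8808
  return 0;
}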