tesseract  5.0.0-alpha-619-ge9db
series.cpp
// File: series.cpp
// Description: Runs networks in series on the same input.
// Author: Ray Smith
//
// (C) Copyright 2013, Google Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "series.h"

#include "fullyconnected.h"
#include "networkscratch.h"
#include "scrollview.h"
#include "tprintf.h"

namespace tesseract {

// ni_ and no_ will be set by AddToStack.
Series::Series(const STRING& name) : Plumbing(name) {
  type_ = NT_SERIES;
}

// Returns the shape output from the network given an input shape (which may
// be partially unknown, i.e. zero).
StaticShape Series::OutputShape(const StaticShape& input_shape) const {
  StaticShape result(input_shape);
  int stack_size = stack_.size();
  for (int i = 0; i < stack_size; ++i) {
    result = stack_[i]->OutputShape(result);
  }
  return result;
}
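
// Illustrative sketch (not part of the original series.cpp): OutputShape just
// threads a shape through the stacked sub-networks in order. The
// FullyConnected constructor arguments and layer sizes below are assumptions
// chosen only for this example.
static StaticShape ExampleSeriesOutputShape(const StaticShape& input_shape) {
  Series series("ExampleSeries");
  series.AddToStack(new FullyConnected("fc1", /*ni=*/20, /*no=*/64, NT_TANH));
  series.AddToStack(new FullyConnected("fc2", /*ni=*/64, /*no=*/10, NT_SOFTMAX));
  // Equivalent to stack_[1]->OutputShape(stack_[0]->OutputShape(input_shape)).
  return series.OutputShape(input_shape);
}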

// Sets up the network for training. Initializes weights using weights of
// scale `range` picked according to the random number generator `randomizer`.
// Note that series has its own implementation just for debug purposes.
int Series::InitWeights(float range, TRand* randomizer) {
  num_weights_ = 0;
  tprintf("Num outputs,weights in Series:\n");
  for (int i = 0; i < stack_.size(); ++i) {
    int weights = stack_[i]->InitWeights(range, randomizer);
    tprintf(" %s:%d, %d\n",
            stack_[i]->spec().c_str(), stack_[i]->NumOutputs(), weights);
    num_weights_ += weights;
  }
  tprintf("Total weights = %d\n", num_weights_);
  return num_weights_;
}
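
// Illustrative sketch (not part of the original series.cpp): a caller would
// typically randomize the weights once after assembling the stack. The weight
// range value and the default-constructed TRand below are assumptions.
//   TRand randomizer;
//   int total_weights = series.InitWeights(0.1f, &randomizer);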

// Recursively searches the network for softmaxes with old_no outputs,
// and remaps their outputs according to code_map. See network.h for details.
int Series::RemapOutputs(int old_no, const std::vector<int>& code_map) {
  num_weights_ = 0;
  tprintf("Num (Extended) outputs,weights in Series:\n");
  for (int i = 0; i < stack_.size(); ++i) {
    int weights = stack_[i]->RemapOutputs(old_no, code_map);
    tprintf(" %s:%d, %d\n", stack_[i]->spec().c_str(),
            stack_[i]->NumOutputs(), weights);
    num_weights_ += weights;
  }
  tprintf("Total weights = %d\n", num_weights_);
  no_ = stack_.back()->NumOutputs();
  return num_weights_;
}

// Sets needs_to_backprop_ to needs_backprop and returns true if
// needs_backprop || any weights in this network so the next layer forward
// can be told to produce backprop for this layer if needed.
bool Series::SetupNeedsBackprop(bool needs_backprop) {
  needs_to_backprop_ = needs_backprop;
  for (int i = 0; i < stack_.size(); ++i)
    needs_backprop = stack_[i]->SetupNeedsBackprop(needs_backprop);
  return needs_backprop;
}

// Returns an integer reduction factor that the network applies to the
// time sequence. Assumes that any 2-d is already eliminated. Used for
// scaling bounding boxes of truth data.
// WARNING: if GlobalMinimax is used to vary the scale, this will return
// the last used scale factor. Call it before any forward, and it will return
// the minimum scale factor of the paths through the GlobalMinimax.
int Series::XScaleFactor() const {
  int factor = 1;
  for (int i = 0; i < stack_.size(); ++i)
    factor *= stack_[i]->XScaleFactor();
  return factor;
}
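
// Worked example (not part of the original series.cpp): if the stack contains
// two layers that each halve the width of the time sequence, so that each
// reports an XScaleFactor() of 2, the series as a whole reports 2 * 2 = 4.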

// Provides the (minimum) x scale factor to the network (of interest only to
// input units) so they can determine how to scale bounding boxes.
void Series::CacheXScaleFactor(int factor) {
  stack_[0]->CacheXScaleFactor(factor);
}

// Runs forward propagation of activations on the input line.
// See NetworkCpp for a detailed discussion of the arguments.
void Series::Forward(bool debug, const NetworkIO& input,
                     const TransposedArray* input_transpose,
                     NetworkScratch* scratch, NetworkIO* output) {
  int stack_size = stack_.size();
  ASSERT_HOST(stack_size > 1);
  // Revolving intermediate buffers.
  NetworkScratch::IO buffer1(input, scratch);
  NetworkScratch::IO buffer2(input, scratch);
  // Run each network in turn, giving the output of n as the input to n + 1,
  // with the final network providing the real output.
  stack_[0]->Forward(debug, input, input_transpose, scratch, buffer1);
  for (int i = 1; i < stack_size; i += 2) {
    stack_[i]->Forward(debug, *buffer1, nullptr, scratch,
                       i + 1 < stack_size ? buffer2 : output);
    if (i + 1 == stack_size) return;
    stack_[i + 1]->Forward(debug, *buffer2, nullptr, scratch,
                           i + 2 < stack_size ? buffer1 : output);
  }
}
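
// Illustrative trace (not part of the original series.cpp) of the revolving
// buffers above for a 4-element stack:
//   stack_[0]: input   -> buffer1
//   stack_[1]: buffer1 -> buffer2
//   stack_[2]: buffer2 -> buffer1
//   stack_[3]: buffer1 -> output
// Only the two scratch buffers are needed, however deep the stack is.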

// Runs backward propagation of errors on the deltas line.
// See NetworkCpp for a detailed discussion of the arguments.
bool Series::Backward(bool debug, const NetworkIO& fwd_deltas,
                      NetworkScratch* scratch,
                      NetworkIO* back_deltas) {
  if (!IsTraining()) return false;
  int stack_size = stack_.size();
  ASSERT_HOST(stack_size > 1);
  // Revolving intermediate buffers.
  NetworkScratch::IO buffer1(fwd_deltas, scratch);
  NetworkScratch::IO buffer2(fwd_deltas, scratch);
  // Run each network in reverse order, giving the back_deltas output of n as
  // the fwd_deltas input to n-1, with the 0 network providing the real output.
  if (!stack_.back()->IsTraining() ||
      !stack_.back()->Backward(debug, fwd_deltas, scratch, buffer1))
    return false;
  for (int i = stack_size - 2; i >= 0; i -= 2) {
    if (!stack_[i]->IsTraining() ||
        !stack_[i]->Backward(debug, *buffer1, scratch,
                             i > 0 ? buffer2 : back_deltas))
      return false;
    if (i == 0) return needs_to_backprop_;
    if (!stack_[i - 1]->IsTraining() ||
        !stack_[i - 1]->Backward(debug, *buffer2, scratch,
                                 i > 1 ? buffer1 : back_deltas))
      return false;
  }
  return needs_to_backprop_;
}
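
// Illustrative trace (not part of the original series.cpp) of Backward for the
// same 4-element stack, run in reverse:
//   stack_[3]: fwd_deltas -> buffer1
//   stack_[2]: buffer1    -> buffer2
//   stack_[1]: buffer2    -> buffer1
//   stack_[0]: buffer1    -> back_deltas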

// Splits the series after the given index, returning the two parts and
// deleting itself. The first part, up to the network with index last_start,
// goes into start, and the rest goes into end.
void Series::SplitAt(int last_start, Series** start, Series** end) {
  *start = nullptr;
  *end = nullptr;
  if (last_start < 0 || last_start >= stack_.size()) {
    tprintf("Invalid split index %d must be in range [0,%d]!\n",
            last_start, stack_.size() - 1);
    return;
  }
  Series* master_series = new Series("MasterSeries");
  Series* boosted_series = new Series("BoostedSeries");
  for (int s = 0; s <= last_start; ++s) {
    if (s + 1 == stack_.size() && stack_[s]->type() == NT_SOFTMAX) {
      // Change the softmax to a tanh.
      auto* fc = static_cast<FullyConnected*>(stack_[s]);
      fc->ChangeType(NT_TANH);
    }
    master_series->AddToStack(stack_[s]);
    stack_[s] = nullptr;
  }
  for (int s = last_start + 1; s < stack_.size(); ++s) {
    boosted_series->AddToStack(stack_[s]);
    stack_[s] = nullptr;
  }
  *start = master_series;
  *end = boosted_series;
  delete this;
}
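
// Illustrative sketch (not part of the original series.cpp): how a caller
// might split a series into a master part and a boosted part. The split index
// and the surrounding training flow are assumptions for the example.
static void ExampleSplit(Series* series) {
  Series* master = nullptr;
  Series* boosted = nullptr;
  // Layers 0..2 go into |master| and the remaining layers go into |boosted|.
  // |series| deletes itself inside SplitAt, so it must not be used afterwards.
  series->SplitAt(2, &master, &boosted);
  if (master == nullptr) return;  // Invalid index: nothing was split.
  // ... train the two parts as required by the caller.
}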

// Appends the elements of the src series to this, removing from src and
// deleting it.
void Series::AppendSeries(Network* src) {
  ASSERT_HOST(src->type() == NT_SERIES);
  auto* src_series = static_cast<Series*>(src);
  for (int s = 0; s < src_series->stack_.size(); ++s) {
    AddToStack(src_series->stack_[s]);
    src_series->stack_[s] = nullptr;
  }
  delete src;
}
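
// Illustrative sketch (not part of the original series.cpp): AppendSeries is
// the inverse of SplitAt, so the two pieces from the sketch above could be
// rejoined with:
//   master->AppendSeries(boosted);  // |boosted| is deleted by AppendSeries.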


}  // namespace tesseract.