tesseract 4.1.1
series.cpp
// File:        series.cpp
// Description: Runs networks in series on the same input.
// Author:      Ray Smith
// Created:     Thu May 02 08:26:06 PST 2013
//
// (C) Copyright 2013, Google Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "series.h"

#include "fullyconnected.h"
#include "networkscratch.h"
#include "scrollview.h"
#include "tprintf.h"

namespace tesseract {

// ni_ and no_ will be set by AddToStack.
Series::Series(const STRING& name) : Plumbing(name) {
  type_ = NT_SERIES;
}

// Returns the shape output from the network given an input shape (which may
// be partially unknown, i.e. zero).
StaticShape Series::OutputShape(const StaticShape& input_shape) const {
  StaticShape result(input_shape);
  int stack_size = stack_.size();
  for (int i = 0; i < stack_size; ++i) {
    result = stack_[i]->OutputShape(result);
  }
  return result;
}
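
// Illustrative sketch (not from the original source): OutputShape() is just a
// left fold of each layer's shape transform over the input shape. For a
// hypothetical series holding a 2x-subsampling layer followed by a softmax,
// the loop above is equivalent to:
//
//   StaticShape shape = input_shape;
//   shape = subsample->OutputShape(shape);  // e.g. halves width and height
//   shape = softmax->OutputShape(shape);    // e.g. sets depth to num classes
//   return shape;
//
// An unknown (zero) dimension in input_shape stays unknown unless some layer
// fixes it.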

// Sets up the network for training. Initializes weights to random values of
// scale `range`, picked using the random number generator `randomizer`.
// Note that Series has its own implementation just for debug purposes.
int Series::InitWeights(float range, TRand* randomizer) {
  num_weights_ = 0;
  tprintf("Num outputs,weights in Series:\n");
  for (int i = 0; i < stack_.size(); ++i) {
    int weights = stack_[i]->InitWeights(range, randomizer);
    tprintf("  %s:%d, %d\n",
            stack_[i]->spec().string(), stack_[i]->NumOutputs(), weights);
    num_weights_ += weights;
  }
  tprintf("Total weights = %d\n", num_weights_);
  return num_weights_;
}

// Recursively searches the network for softmaxes with old_no outputs,
// and remaps their outputs according to code_map. See network.h for details.
int Series::RemapOutputs(int old_no, const std::vector<int>& code_map) {
  num_weights_ = 0;
  tprintf("Num (Extended) outputs,weights in Series:\n");
  for (int i = 0; i < stack_.size(); ++i) {
    int weights = stack_[i]->RemapOutputs(old_no, code_map);
    tprintf("  %s:%d, %d\n", stack_[i]->spec().string(),
            stack_[i]->NumOutputs(), weights);
    num_weights_ += weights;
  }
  tprintf("Total weights = %d\n", num_weights_);
  no_ = stack_.back()->NumOutputs();
  return num_weights_;
}
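
// Illustrative sketch (not from the original source), assuming the code_map
// convention described in network.h: entry i gives the old output index whose
// weights new output i should copy, with a negative entry marking a genuinely
// new output. For example, to grow a 3-class softmax to 4 classes while
// keeping the existing classes in place:
//
//   std::vector<int> code_map = {0, 1, 2, -1};  // hypothetical mapping
//   series->RemapOutputs(3, code_map);
//   // series->NumOutputs() is now 4.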

// Sets needs_to_backprop_ to needs_backprop, and returns true if
// needs_backprop is true or this network has any weights, so the next layer
// forward can be told to produce backprop for this layer if needed.
bool Series::SetupNeedsBackprop(bool needs_backprop) {
  needs_to_backprop_ = needs_backprop;
  for (int i = 0; i < stack_.size(); ++i)
    needs_backprop = stack_[i]->SetupNeedsBackprop(needs_backprop);
  return needs_backprop;
}
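
// Illustrative sketch (not from the original source), using hypothetical
// layers: each layer's return value seeds the next call, so the first layer
// with weights flips the flag to true and every later layer is then asked to
// produce backprop.
//
//   // stack_ = { weightless preprocessing, weighted layer A, weighted layer B }
//   series->SetupNeedsBackprop(false);
//   // preprocessing: receives false, returns false (no weights)
//   // layer A:       receives false, returns true  (has weights)
//   // layer B:       receives true,  returns true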

// Returns an integer reduction factor that the network applies to the
// time sequence. Assumes that any 2-d is already eliminated. Used for
// scaling bounding boxes of truth data.
// WARNING: if GlobalMinimax is used to vary the scale, this will return
// the last used scale factor. Call it before any forward, and it will return
// the minimum scale factor of the paths through the GlobalMinimax.
int Series::XScaleFactor() const {
  int factor = 1;
  for (int i = 0; i < stack_.size(); ++i)
    factor *= stack_[i]->XScaleFactor();
  return factor;
}
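
// Illustrative sketch (not from the original source): the per-layer factors
// multiply, so a hypothetical series containing two layers that each halve
// the width of the time sequence reports an overall reduction of 4:
//
//   // stack_[0]->XScaleFactor() == 2, stack_[1]->XScaleFactor() == 2
//   int factor = series->XScaleFactor();  // == 4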

// Provides the (minimum) x scale factor to the network (of interest only to
// input units) so they can determine how to scale bounding boxes.
void Series::CacheXScaleFactor(int factor) {
  stack_[0]->CacheXScaleFactor(factor);
}

// Runs forward propagation of activations on the input line.
// See NetworkCpp for a detailed discussion of the arguments.
void Series::Forward(bool debug, const NetworkIO& input,
                     const TransposedArray* input_transpose,
                     NetworkScratch* scratch, NetworkIO* output) {
  int stack_size = stack_.size();
  ASSERT_HOST(stack_size > 1);
  // Revolving intermediate buffers.
  NetworkScratch::IO buffer1(input, scratch);
  NetworkScratch::IO buffer2(input, scratch);
  // Run each network in turn, giving the output of n as the input to n + 1,
  // with the final network providing the real output.
  stack_[0]->Forward(debug, input, input_transpose, scratch, buffer1);
  for (int i = 1; i < stack_size; i += 2) {
    stack_[i]->Forward(debug, *buffer1, nullptr, scratch,
                       i + 1 < stack_size ? buffer2 : output);
    if (i + 1 == stack_size) return;
    stack_[i + 1]->Forward(debug, *buffer2, nullptr, scratch,
                           i + 2 < stack_size ? buffer1 : output);
  }
}
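
// Illustrative sketch (not from the original source): the loop above
// ping-pongs between two scratch buffers, so at most two intermediate
// NetworkIOs are alive no matter how deep the stack is. Unrolled for a
// hypothetical 3-layer series:
//
//   stack_[0]->Forward(debug, input,    input_transpose, scratch, buffer1);
//   stack_[1]->Forward(debug, *buffer1, nullptr,         scratch, buffer2);
//   stack_[2]->Forward(debug, *buffer2, nullptr,         scratch, output);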

// Runs backward propagation of errors on the deltas line.
// See NetworkCpp for a detailed discussion of the arguments.
bool Series::Backward(bool debug, const NetworkIO& fwd_deltas,
                      NetworkScratch* scratch,
                      NetworkIO* back_deltas) {
  if (!IsTraining()) return false;
  int stack_size = stack_.size();
  ASSERT_HOST(stack_size > 1);
  // Revolving intermediate buffers.
  NetworkScratch::IO buffer1(fwd_deltas, scratch);
  NetworkScratch::IO buffer2(fwd_deltas, scratch);
  // Run each network in reverse order, giving the back_deltas output of n as
  // the fwd_deltas input to n-1, with the 0 network providing the real output.
  if (!stack_.back()->IsTraining() ||
      !stack_.back()->Backward(debug, fwd_deltas, scratch, buffer1))
    return false;
  for (int i = stack_size - 2; i >= 0; i -= 2) {
    if (!stack_[i]->IsTraining() ||
        !stack_[i]->Backward(debug, *buffer1, scratch,
                             i > 0 ? buffer2 : back_deltas))
      return false;
    if (i == 0) return needs_to_backprop_;
    if (!stack_[i - 1]->IsTraining() ||
        !stack_[i - 1]->Backward(debug, *buffer2, scratch,
                                 i > 1 ? buffer1 : back_deltas))
      return false;
  }
  return needs_to_backprop_;
}

// Splits the series after the given index, returning the two parts and
// deleting itself. The first part, up to the network with index last_start,
// goes into start, and the rest goes into end.
void Series::SplitAt(int last_start, Series** start, Series** end) {
  *start = nullptr;
  *end = nullptr;
  if (last_start < 0 || last_start >= stack_.size()) {
    tprintf("Invalid split index %d must be in range [0,%d]!\n",
            last_start, stack_.size() - 1);
    return;
  }
  Series* master_series = new Series("MasterSeries");
  Series* boosted_series = new Series("BoostedSeries");
  for (int s = 0; s <= last_start; ++s) {
    if (s + 1 == stack_.size() && stack_[s]->type() == NT_SOFTMAX) {
      // Change the softmax to a tanh.
      auto* fc = static_cast<FullyConnected*>(stack_[s]);
      fc->ChangeType(NT_TANH);
    }
    master_series->AddToStack(stack_[s]);
    stack_[s] = nullptr;
  }
  for (int s = last_start + 1; s < stack_.size(); ++s) {
    boosted_series->AddToStack(stack_[s]);
    stack_[s] = nullptr;
  }
  *start = master_series;
  *end = boosted_series;
  delete this;
}
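
// Illustrative sketch (not from the original source): typical use when
// carving an existing network into a shared "master" part and a part to be
// retrained. All variable names are hypothetical.
//
//   Series* start = nullptr;
//   Series* end = nullptr;
//   series->SplitAt(1, &start, &end);  // layers 0..1 -> start, rest -> end
//   // series has deleted itself; only start and end are valid from here on.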

// Appends the elements of the src series to this, removing them from src and
// deleting it.
void Series::AppendSeries(Network* src) {
  ASSERT_HOST(src->type() == NT_SERIES);
  auto* src_series = static_cast<Series*>(src);
  for (int s = 0; s < src_series->stack_.size(); ++s) {
    AddToStack(src_series->stack_[s]);
    src_series->stack_[s] = nullptr;
  }
  delete src;
}
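
// Illustrative sketch (not from the original source): AppendSeries is the
// counterpart of SplitAt, re-joining two pieces into a single series.
//
//   start->AppendSeries(end);  // end's layers move into start; end is deleted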

}  // namespace tesseract.