// cluster.cpp (tesseract) — doxygen navigation header removed.
/******************************************************************************
 ** Filename: cluster.c
 ** Purpose: Routines for clustering points in N-D space
 ** Author: Dan Johnson
 **
 ** (c) Copyright Hewlett-Packard Company, 1988.
 ** Licensed under the Apache License, Version 2.0 (the "License");
 ** you may not use this file except in compliance with the License.
 ** You may obtain a copy of the License at
 ** http://www.apache.org/licenses/LICENSE-2.0
 ** Unless required by applicable law or agreed to in writing, software
 ** distributed under the License is distributed on an "AS IS" BASIS,
 ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 ** See the License for the specific language governing permissions and
 ** limitations under the License.
 *****************************************************************************/

18 #include <cfloat> // for FLT_MAX
19 #include <cmath>
20 #include <vector> // for std::vector
21 
22 #include "cluster.h"
23 #include "cutil.h" // for void_proc
24 #include "emalloc.h"
25 #include "genericheap.h"
26 #include "helpers.h"
27 #include "kdpair.h"
28 #include "matrix.h"
29 #include "tprintf.h"
30 
31 #define HOTELLING 1 // If true use Hotelling's test to decide where to split.
32 #define FTABLE_X 10 // Size of FTable.
33 #define FTABLE_Y 100 // Size of FTable.
34 
35 // Table of values approximating the cumulative F-distribution for a confidence of 1%.
36 const double FTable[FTABLE_Y][FTABLE_X] = {
37  {4052.19, 4999.52, 5403.34, 5624.62, 5763.65, 5858.97, 5928.33, 5981.10, 6022.50, 6055.85,},
38  {98.502, 99.000, 99.166, 99.249, 99.300, 99.333, 99.356, 99.374, 99.388, 99.399,},
39  {34.116, 30.816, 29.457, 28.710, 28.237, 27.911, 27.672, 27.489, 27.345, 27.229,},
40  {21.198, 18.000, 16.694, 15.977, 15.522, 15.207, 14.976, 14.799, 14.659, 14.546,},
41  {16.258, 13.274, 12.060, 11.392, 10.967, 10.672, 10.456, 10.289, 10.158, 10.051,},
42  {13.745, 10.925, 9.780, 9.148, 8.746, 8.466, 8.260, 8.102, 7.976, 7.874,},
43  {12.246, 9.547, 8.451, 7.847, 7.460, 7.191, 6.993, 6.840, 6.719, 6.620,},
44  {11.259, 8.649, 7.591, 7.006, 6.632, 6.371, 6.178, 6.029, 5.911, 5.814,},
45  {10.561, 8.022, 6.992, 6.422, 6.057, 5.802, 5.613, 5.467, 5.351, 5.257,},
46  {10.044, 7.559, 6.552, 5.994, 5.636, 5.386, 5.200, 5.057, 4.942, 4.849,},
47  { 9.646, 7.206, 6.217, 5.668, 5.316, 5.069, 4.886, 4.744, 4.632, 4.539,},
48  { 9.330, 6.927, 5.953, 5.412, 5.064, 4.821, 4.640, 4.499, 4.388, 4.296,},
49  { 9.074, 6.701, 5.739, 5.205, 4.862, 4.620, 4.441, 4.302, 4.191, 4.100,},
50  { 8.862, 6.515, 5.564, 5.035, 4.695, 4.456, 4.278, 4.140, 4.030, 3.939,},
51  { 8.683, 6.359, 5.417, 4.893, 4.556, 4.318, 4.142, 4.004, 3.895, 3.805,},
52  { 8.531, 6.226, 5.292, 4.773, 4.437, 4.202, 4.026, 3.890, 3.780, 3.691,},
53  { 8.400, 6.112, 5.185, 4.669, 4.336, 4.102, 3.927, 3.791, 3.682, 3.593,},
54  { 8.285, 6.013, 5.092, 4.579, 4.248, 4.015, 3.841, 3.705, 3.597, 3.508,},
55  { 8.185, 5.926, 5.010, 4.500, 4.171, 3.939, 3.765, 3.631, 3.523, 3.434,},
56  { 8.096, 5.849, 4.938, 4.431, 4.103, 3.871, 3.699, 3.564, 3.457, 3.368,},
57  { 8.017, 5.780, 4.874, 4.369, 4.042, 3.812, 3.640, 3.506, 3.398, 3.310,},
58  { 7.945, 5.719, 4.817, 4.313, 3.988, 3.758, 3.587, 3.453, 3.346, 3.258,},
59  { 7.881, 5.664, 4.765, 4.264, 3.939, 3.710, 3.539, 3.406, 3.299, 3.211,},
60  { 7.823, 5.614, 4.718, 4.218, 3.895, 3.667, 3.496, 3.363, 3.256, 3.168,},
61  { 7.770, 5.568, 4.675, 4.177, 3.855, 3.627, 3.457, 3.324, 3.217, 3.129,},
62  { 7.721, 5.526, 4.637, 4.140, 3.818, 3.591, 3.421, 3.288, 3.182, 3.094,},
63  { 7.677, 5.488, 4.601, 4.106, 3.785, 3.558, 3.388, 3.256, 3.149, 3.062,},
64  { 7.636, 5.453, 4.568, 4.074, 3.754, 3.528, 3.358, 3.226, 3.120, 3.032,},
65  { 7.598, 5.420, 4.538, 4.045, 3.725, 3.499, 3.330, 3.198, 3.092, 3.005,},
66  { 7.562, 5.390, 4.510, 4.018, 3.699, 3.473, 3.305, 3.173, 3.067, 2.979,},
67  { 7.530, 5.362, 4.484, 3.993, 3.675, 3.449, 3.281, 3.149, 3.043, 2.955,},
68  { 7.499, 5.336, 4.459, 3.969, 3.652, 3.427, 3.258, 3.127, 3.021, 2.934,},
69  { 7.471, 5.312, 4.437, 3.948, 3.630, 3.406, 3.238, 3.106, 3.000, 2.913,},
70  { 7.444, 5.289, 4.416, 3.927, 3.611, 3.386, 3.218, 3.087, 2.981, 2.894,},
71  { 7.419, 5.268, 4.396, 3.908, 3.592, 3.368, 3.200, 3.069, 2.963, 2.876,},
72  { 7.396, 5.248, 4.377, 3.890, 3.574, 3.351, 3.183, 3.052, 2.946, 2.859,},
73  { 7.373, 5.229, 4.360, 3.873, 3.558, 3.334, 3.167, 3.036, 2.930, 2.843,},
74  { 7.353, 5.211, 4.343, 3.858, 3.542, 3.319, 3.152, 3.021, 2.915, 2.828,},
75  { 7.333, 5.194, 4.327, 3.843, 3.528, 3.305, 3.137, 3.006, 2.901, 2.814,},
76  { 7.314, 5.179, 4.313, 3.828, 3.514, 3.291, 3.124, 2.993, 2.888, 2.801,},
77  { 7.296, 5.163, 4.299, 3.815, 3.501, 3.278, 3.111, 2.980, 2.875, 2.788,},
78  { 7.280, 5.149, 4.285, 3.802, 3.488, 3.266, 3.099, 2.968, 2.863, 2.776,},
79  { 7.264, 5.136, 4.273, 3.790, 3.476, 3.254, 3.087, 2.957, 2.851, 2.764,},
80  { 7.248, 5.123, 4.261, 3.778, 3.465, 3.243, 3.076, 2.946, 2.840, 2.754,},
81  { 7.234, 5.110, 4.249, 3.767, 3.454, 3.232, 3.066, 2.935, 2.830, 2.743,},
82  { 7.220, 5.099, 4.238, 3.757, 3.444, 3.222, 3.056, 2.925, 2.820, 2.733,},
83  { 7.207, 5.087, 4.228, 3.747, 3.434, 3.213, 3.046, 2.916, 2.811, 2.724,},
84  { 7.194, 5.077, 4.218, 3.737, 3.425, 3.204, 3.037, 2.907, 2.802, 2.715,},
85  { 7.182, 5.066, 4.208, 3.728, 3.416, 3.195, 3.028, 2.898, 2.793, 2.706,},
86  { 7.171, 5.057, 4.199, 3.720, 3.408, 3.186, 3.020, 2.890, 2.785, 2.698,},
87  { 7.159, 5.047, 4.191, 3.711, 3.400, 3.178, 3.012, 2.882, 2.777, 2.690,},
88  { 7.149, 5.038, 4.182, 3.703, 3.392, 3.171, 3.005, 2.874, 2.769, 2.683,},
89  { 7.139, 5.030, 4.174, 3.695, 3.384, 3.163, 2.997, 2.867, 2.762, 2.675,},
90  { 7.129, 5.021, 4.167, 3.688, 3.377, 3.156, 2.990, 2.860, 2.755, 2.668,},
91  { 7.119, 5.013, 4.159, 3.681, 3.370, 3.149, 2.983, 2.853, 2.748, 2.662,},
92  { 7.110, 5.006, 4.152, 3.674, 3.363, 3.143, 2.977, 2.847, 2.742, 2.655,},
93  { 7.102, 4.998, 4.145, 3.667, 3.357, 3.136, 2.971, 2.841, 2.736, 2.649,},
94  { 7.093, 4.991, 4.138, 3.661, 3.351, 3.130, 2.965, 2.835, 2.730, 2.643,},
95  { 7.085, 4.984, 4.132, 3.655, 3.345, 3.124, 2.959, 2.829, 2.724, 2.637,},
96  { 7.077, 4.977, 4.126, 3.649, 3.339, 3.119, 2.953, 2.823, 2.718, 2.632,},
97  { 7.070, 4.971, 4.120, 3.643, 3.333, 3.113, 2.948, 2.818, 2.713, 2.626,},
98  { 7.062, 4.965, 4.114, 3.638, 3.328, 3.108, 2.942, 2.813, 2.708, 2.621,},
99  { 7.055, 4.959, 4.109, 3.632, 3.323, 3.103, 2.937, 2.808, 2.703, 2.616,},
100  { 7.048, 4.953, 4.103, 3.627, 3.318, 3.098, 2.932, 2.803, 2.698, 2.611,},
101  { 7.042, 4.947, 4.098, 3.622, 3.313, 3.093, 2.928, 2.798, 2.693, 2.607,},
102  { 7.035, 4.942, 4.093, 3.618, 3.308, 3.088, 2.923, 2.793, 2.689, 2.602,},
103  { 7.029, 4.937, 4.088, 3.613, 3.304, 3.084, 2.919, 2.789, 2.684, 2.598,},
104  { 7.023, 4.932, 4.083, 3.608, 3.299, 3.080, 2.914, 2.785, 2.680, 2.593,},
105  { 7.017, 4.927, 4.079, 3.604, 3.295, 3.075, 2.910, 2.781, 2.676, 2.589,},
106  { 7.011, 4.922, 4.074, 3.600, 3.291, 3.071, 2.906, 2.777, 2.672, 2.585,},
107  { 7.006, 4.917, 4.070, 3.596, 3.287, 3.067, 2.902, 2.773, 2.668, 2.581,},
108  { 7.001, 4.913, 4.066, 3.591, 3.283, 3.063, 2.898, 2.769, 2.664, 2.578,},
109  { 6.995, 4.908, 4.062, 3.588, 3.279, 3.060, 2.895, 2.765, 2.660, 2.574,},
110  { 6.990, 4.904, 4.058, 3.584, 3.275, 3.056, 2.891, 2.762, 2.657, 2.570,},
111  { 6.985, 4.900, 4.054, 3.580, 3.272, 3.052, 2.887, 2.758, 2.653, 2.567,},
112  { 6.981, 4.896, 4.050, 3.577, 3.268, 3.049, 2.884, 2.755, 2.650, 2.563,},
113  { 6.976, 4.892, 4.047, 3.573, 3.265, 3.046, 2.881, 2.751, 2.647, 2.560,},
114  { 6.971, 4.888, 4.043, 3.570, 3.261, 3.042, 2.877, 2.748, 2.644, 2.557,},
115  { 6.967, 4.884, 4.040, 3.566, 3.258, 3.039, 2.874, 2.745, 2.640, 2.554,},
116  { 6.963, 4.881, 4.036, 3.563, 3.255, 3.036, 2.871, 2.742, 2.637, 2.551,},
117  { 6.958, 4.877, 4.033, 3.560, 3.252, 3.033, 2.868, 2.739, 2.634, 2.548,},
118  { 6.954, 4.874, 4.030, 3.557, 3.249, 3.030, 2.865, 2.736, 2.632, 2.545,},
119  { 6.950, 4.870, 4.027, 3.554, 3.246, 3.027, 2.863, 2.733, 2.629, 2.542,},
120  { 6.947, 4.867, 4.024, 3.551, 3.243, 3.025, 2.860, 2.731, 2.626, 2.539,},
121  { 6.943, 4.864, 4.021, 3.548, 3.240, 3.022, 2.857, 2.728, 2.623, 2.537,},
122  { 6.939, 4.861, 4.018, 3.545, 3.238, 3.019, 2.854, 2.725, 2.621, 2.534,},
123  { 6.935, 4.858, 4.015, 3.543, 3.235, 3.017, 2.852, 2.723, 2.618, 2.532,},
124  { 6.932, 4.855, 4.012, 3.540, 3.233, 3.014, 2.849, 2.720, 2.616, 2.529,},
125  { 6.928, 4.852, 4.010, 3.538, 3.230, 3.012, 2.847, 2.718, 2.613, 2.527,},
126  { 6.925, 4.849, 4.007, 3.535, 3.228, 3.009, 2.845, 2.715, 2.611, 2.524,},
127  { 6.922, 4.846, 4.004, 3.533, 3.225, 3.007, 2.842, 2.713, 2.609, 2.522,},
128  { 6.919, 4.844, 4.002, 3.530, 3.223, 3.004, 2.840, 2.711, 2.606, 2.520,},
129  { 6.915, 4.841, 3.999, 3.528, 3.221, 3.002, 2.838, 2.709, 2.604, 2.518,},
130  { 6.912, 4.838, 3.997, 3.525, 3.218, 3.000, 2.835, 2.706, 2.602, 2.515,},
131  { 6.909, 4.836, 3.995, 3.523, 3.216, 2.998, 2.833, 2.704, 2.600, 2.513,},
132  { 6.906, 4.833, 3.992, 3.521, 3.214, 2.996, 2.831, 2.702, 2.598, 2.511,},
133  { 6.904, 4.831, 3.990, 3.519, 3.212, 2.994, 2.829, 2.700, 2.596, 2.509,},
134  { 6.901, 4.829, 3.988, 3.517, 3.210, 2.992, 2.827, 2.698, 2.594, 2.507,},
135  { 6.898, 4.826, 3.986, 3.515, 3.208, 2.990, 2.825, 2.696, 2.592, 2.505,},
136  { 6.895, 4.824, 3.984, 3.513, 3.206, 2.988, 2.823, 2.694, 2.590, 2.503}
137 };
138 
143 #define MINVARIANCE 0.0004
144 
151 #define MINSAMPLESPERBUCKET 5
152 #define MINSAMPLES (MINBUCKETS * MINSAMPLESPERBUCKET)
153 #define MINSAMPLESNEEDED 1
154 
161 #define BUCKETTABLESIZE 1024
162 #define NORMALEXTENT 3.0
163 
164 struct TEMPCLUSTER {
167 };
168 
171 
172 struct STATISTICS {
173  float AvgVariance;
174  float *CoVariance;
175  float *Min; // largest negative distance from the mean
176  float *Max; // largest positive distance from the mean
177 };
178 
179 struct BUCKETS {
180  DISTRIBUTION Distribution; // distribution being tested for
181  uint32_t SampleCount; // # of samples in histogram
182  double Confidence; // confidence level of test
183  double ChiSquared; // test threshold
184  uint16_t NumberOfBuckets; // number of cells in histogram
185  uint16_t Bucket[BUCKETTABLESIZE]; // mapping to histogram buckets
186  uint32_t *Count; // frequency of occurrence histogram
187  float *ExpectedCount; // expected histogram
188 };
189 
190 struct CHISTRUCT{
192  double Alpha;
193  double ChiSquared;
194 };
195 
196 // For use with KDWalk / MakePotentialClusters
198  ClusterHeap *heap; // heap used to hold temp clusters, "best" on top
199  TEMPCLUSTER *candidates; // array of potential clusters
200  KDTREE *tree; // kd-tree to be searched for neighbors
201  int32_t next; // next candidate to be used
202 };
203 
204 typedef double (*DENSITYFUNC) (int32_t);
205 typedef double (*SOLVEFUNC) (CHISTRUCT *, double);
206 
207 #define Odd(N) ((N)%2)
208 #define Mirror(N,R) ((R) - (N) - 1)
209 #define Abs(N) (((N) < 0) ? (-(N)) : (N))
210 
211 //--------------Global Data Definitions and Declarations----------------------
219 #define SqrtOf2Pi 2.506628275
220 static const double kNormalStdDev = BUCKETTABLESIZE / (2.0 * NORMALEXTENT);
221 static const double kNormalVariance =
223 static const double kNormalMagnitude =
224  (2.0 * NORMALEXTENT) / (SqrtOf2Pi * BUCKETTABLESIZE);
225 static const double kNormalMean = BUCKETTABLESIZE / 2;
226 
229 #define LOOKUPTABLESIZE 8
230 #define MAXDEGREESOFFREEDOM MAXBUCKETS
231 
232 static const uint32_t kCountTable[LOOKUPTABLESIZE] = {
233  MINSAMPLES, 200, 400, 600, 800, 1000, 1500, 2000
234 }; // number of samples
235 
236 static const uint16_t kBucketsTable[LOOKUPTABLESIZE] = {
237  MINBUCKETS, 16, 20, 24, 27, 30, 35, MAXBUCKETS
238 }; // number of buckets
239 
240 /*-------------------------------------------------------------------------
241  Private Function Prototypes
242 --------------------------------------------------------------------------*/
243 void CreateClusterTree(CLUSTERER *Clusterer);
244 
245 void MakePotentialClusters(ClusteringContext *context, CLUSTER *Cluster,
246  int32_t Level);
247 
249  CLUSTER *Cluster,
250  float *Distance);
251 
252 CLUSTER *MakeNewCluster(CLUSTERER *Clusterer, TEMPCLUSTER *TempCluster);
253 
254 int32_t MergeClusters (int16_t N,
255 PARAM_DESC ParamDesc[],
256 int32_t n1,
257 int32_t n2,
258 float m[],
259 float m1[], float m2[]);
260 
262 
263 PROTOTYPE *MakePrototype(CLUSTERER *Clusterer,
265  CLUSTER *Cluster);
266 
267 PROTOTYPE *MakeDegenerateProto(uint16_t N,
268  CLUSTER *Cluster,
269  STATISTICS *Statistics,
270  PROTOSTYLE Style,
271  int32_t MinSamples);
272 
275  CLUSTER *Cluster,
276  STATISTICS *Statistics);
277 
279  CLUSTER *Cluster,
280  STATISTICS *Statistics,
281  BUCKETS *Buckets);
282 
284  CLUSTER *Cluster,
285  STATISTICS *Statistics,
286  BUCKETS *Buckets);
287 
289  CLUSTER *Cluster,
290  STATISTICS *Statistics,
291  BUCKETS *NormalBuckets,
292  double Confidence);
293 
294 void MakeDimRandom(uint16_t i, PROTOTYPE *Proto, PARAM_DESC *ParamDesc);
295 
296 void MakeDimUniform(uint16_t i, PROTOTYPE *Proto, STATISTICS *Statistics);
297 
298 STATISTICS *ComputeStatistics (int16_t N,
299 PARAM_DESC ParamDesc[], CLUSTER * Cluster);
300 
301 PROTOTYPE *NewSphericalProto(uint16_t N,
302  CLUSTER *Cluster,
303  STATISTICS *Statistics);
304 
305 PROTOTYPE *NewEllipticalProto(int16_t N,
306  CLUSTER *Cluster,
307  STATISTICS *Statistics);
308 
309 PROTOTYPE *NewMixedProto(int16_t N, CLUSTER *Cluster, STATISTICS *Statistics);
310 
311 PROTOTYPE *NewSimpleProto(int16_t N, CLUSTER *Cluster);
312 
313 bool Independent(PARAM_DESC* ParamDesc,
314  int16_t N, float* CoVariance, float Independence);
315 
316 BUCKETS *GetBuckets(CLUSTERER* clusterer,
317  DISTRIBUTION Distribution,
318  uint32_t SampleCount,
319  double Confidence);
320 
321 BUCKETS *MakeBuckets(DISTRIBUTION Distribution,
322  uint32_t SampleCount,
323  double Confidence);
324 
325 uint16_t OptimumNumberOfBuckets(uint32_t SampleCount);
326 
327 double ComputeChiSquared(uint16_t DegreesOfFreedom, double Alpha);
328 
329 double NormalDensity(int32_t x);
330 
331 double UniformDensity(int32_t x);
332 
333 double Integral(double f1, double f2, double Dx);
334 
335 void FillBuckets(BUCKETS *Buckets,
336  CLUSTER *Cluster,
337  uint16_t Dim,
338  PARAM_DESC *ParamDesc,
339  float Mean,
340  float StdDev);
341 
342 uint16_t NormalBucket(PARAM_DESC *ParamDesc,
343  float x,
344  float Mean,
345  float StdDev);
346 
347 uint16_t UniformBucket(PARAM_DESC *ParamDesc,
348  float x,
349  float Mean,
350  float StdDev);
351 
352 bool DistributionOK(BUCKETS* Buckets);
353 
354 void FreeStatistics(STATISTICS *Statistics);
355 
356 void FreeBuckets(BUCKETS *Buckets);
357 
358 void FreeCluster(CLUSTER *Cluster);
359 
360 uint16_t DegreesOfFreedom(DISTRIBUTION Distribution, uint16_t HistogramBuckets);
361 
362 int NumBucketsMatch(void *arg1, // BUCKETS *Histogram,
363  void *arg2); // uint16_t *DesiredNumberOfBuckets);
364 
365 int ListEntryMatch(void *arg1, void *arg2);
366 
367 void AdjustBuckets(BUCKETS *Buckets, uint32_t NewSampleCount);
368 
369 void InitBuckets(BUCKETS *Buckets);
370 
371 int AlphaMatch(void *arg1, // CHISTRUCT *ChiStruct,
372  void *arg2); // CHISTRUCT *SearchKey);
373 
374 CHISTRUCT *NewChiStruct(uint16_t DegreesOfFreedom, double Alpha);
375 
376 double Solve(SOLVEFUNC Function,
377  void *FunctionParams,
378  double InitialGuess,
379  double Accuracy);
380 
381 double ChiArea(CHISTRUCT *ChiParams, double x);
382 
383 bool MultipleCharSamples(CLUSTERER* Clusterer,
384  CLUSTER* Cluster,
385  float MaxIllegal);
386 
387 double InvertMatrix(const float* input, int size, float* inv);
388 
389 //--------------------------Public Code--------------------------------------
398 CLUSTERER *
399 MakeClusterer (int16_t SampleSize, const PARAM_DESC ParamDesc[]) {
400  CLUSTERER *Clusterer;
401  int i;
402 
403  // allocate main clusterer data structure and init simple fields
404  Clusterer = (CLUSTERER *) Emalloc (sizeof (CLUSTERER));
405  Clusterer->SampleSize = SampleSize;
406  Clusterer->NumberOfSamples = 0;
407  Clusterer->NumChar = 0;
408 
409  // init fields which will not be used initially
410  Clusterer->Root = nullptr;
411  Clusterer->ProtoList = NIL_LIST;
412 
413  // maintain a copy of param descriptors in the clusterer data structure
414  Clusterer->ParamDesc =
415  (PARAM_DESC *) Emalloc (SampleSize * sizeof (PARAM_DESC));
416  for (i = 0; i < SampleSize; i++) {
417  Clusterer->ParamDesc[i].Circular = ParamDesc[i].Circular;
418  Clusterer->ParamDesc[i].NonEssential = ParamDesc[i].NonEssential;
419  Clusterer->ParamDesc[i].Min = ParamDesc[i].Min;
420  Clusterer->ParamDesc[i].Max = ParamDesc[i].Max;
421  Clusterer->ParamDesc[i].Range = ParamDesc[i].Max - ParamDesc[i].Min;
422  Clusterer->ParamDesc[i].HalfRange = Clusterer->ParamDesc[i].Range / 2;
423  Clusterer->ParamDesc[i].MidRange =
424  (ParamDesc[i].Max + ParamDesc[i].Min) / 2;
425  }
426 
427  // allocate a kd tree to hold the samples
428  Clusterer->KDTree = MakeKDTree (SampleSize, ParamDesc);
429 
430  // Initialize cache of histogram buckets to minimize recomputing them.
431  for (int d = 0; d < DISTRIBUTION_COUNT; ++d) {
432  for (int c = 0; c < MAXBUCKETS + 1 - MINBUCKETS; ++c)
433  Clusterer->bucket_cache[d][c] = nullptr;
434  }
435 
436  return Clusterer;
437 } // MakeClusterer
438 
452 SAMPLE* MakeSample(CLUSTERER * Clusterer, const float* Feature,
453  int32_t CharID) {
454  SAMPLE *Sample;
455  int i;
456 
457  // see if the samples have already been clustered - if so trap an error
458  // Can't add samples after they have been clustered.
459  ASSERT_HOST(Clusterer->Root == nullptr);
460 
461  // allocate the new sample and initialize it
462  Sample = (SAMPLE *) Emalloc (sizeof (SAMPLE) +
463  (Clusterer->SampleSize -
464  1) * sizeof (float));
465  Sample->Clustered = FALSE;
466  Sample->Prototype = FALSE;
467  Sample->SampleCount = 1;
468  Sample->Left = nullptr;
469  Sample->Right = nullptr;
470  Sample->CharID = CharID;
471 
472  for (i = 0; i < Clusterer->SampleSize; i++)
473  Sample->Mean[i] = Feature[i];
474 
475  // add the sample to the KD tree - keep track of the total # of samples
476  Clusterer->NumberOfSamples++;
477  KDStore(Clusterer->KDTree, Sample->Mean, Sample);
478  if (CharID >= Clusterer->NumChar)
479  Clusterer->NumChar = CharID + 1;
480 
481  // execute hook for monitoring clustering operation
482  // (*SampleCreationHook)(Sample);
483 
484  return (Sample);
485 } // MakeSample
486 
507  //only create cluster tree if samples have never been clustered before
508  if (Clusterer->Root == nullptr)
509  CreateClusterTree(Clusterer);
510 
511  //deallocate the old prototype list if one exists
512  FreeProtoList (&Clusterer->ProtoList);
513  Clusterer->ProtoList = NIL_LIST;
514 
515  //compute prototypes starting at the root node in the tree
516  ComputePrototypes(Clusterer, Config);
517  // We don't need the cluster pointers in the protos any more, so null them
518  // out, which makes it safe to delete the clusterer.
519  LIST proto_list = Clusterer->ProtoList;
520  iterate(proto_list) {
521  PROTOTYPE *proto = reinterpret_cast<PROTOTYPE *>(first_node(proto_list));
522  proto->Cluster = nullptr;
523  }
524  return Clusterer->ProtoList;
525 } // ClusterSamples
526 
538 void FreeClusterer(CLUSTERER *Clusterer) {
539  if (Clusterer != nullptr) {
540  free(Clusterer->ParamDesc);
541  if (Clusterer->KDTree != nullptr)
542  FreeKDTree (Clusterer->KDTree);
543  if (Clusterer->Root != nullptr)
544  FreeCluster (Clusterer->Root);
545  // Free up all used buckets structures.
546  for (int d = 0; d < DISTRIBUTION_COUNT; ++d) {
547  for (int c = 0; c < MAXBUCKETS + 1 - MINBUCKETS; ++c)
548  if (Clusterer->bucket_cache[d][c] != nullptr)
549  FreeBuckets(Clusterer->bucket_cache[d][c]);
550  }
551 
552  free(Clusterer);
553  }
554 } // FreeClusterer
555 
563 void FreeProtoList(LIST *ProtoList) {
564  destroy_nodes(*ProtoList, FreePrototype);
565 } // FreeProtoList
566 
575 void FreePrototype(void *arg) { //PROTOTYPE *Prototype)
576  PROTOTYPE *Prototype = (PROTOTYPE *) arg;
577 
578  // unmark the corresponding cluster (if there is one
579  if (Prototype->Cluster != nullptr)
580  Prototype->Cluster->Prototype = FALSE;
581 
582  // deallocate the prototype statistics and then the prototype itself
583  free(Prototype->Distrib);
584  free(Prototype->Mean);
585  if (Prototype->Style != spherical) {
586  free(Prototype->Variance.Elliptical);
587  free(Prototype->Magnitude.Elliptical);
588  free(Prototype->Weight.Elliptical);
589  }
590  free(Prototype);
591 } // FreePrototype
592 
606 CLUSTER *NextSample(LIST *SearchState) {
607  CLUSTER *Cluster;
608 
609  if (*SearchState == NIL_LIST)
610  return (nullptr);
611  Cluster = (CLUSTER *) first_node (*SearchState);
612  *SearchState = pop (*SearchState);
613  while (TRUE) {
614  if (Cluster->Left == nullptr)
615  return (Cluster);
616  *SearchState = push (*SearchState, Cluster->Right);
617  Cluster = Cluster->Left;
618  }
619 } // NextSample
620 
628 float Mean(PROTOTYPE *Proto, uint16_t Dimension) {
629  return (Proto->Mean[Dimension]);
630 } // Mean
631 
639 float StandardDeviation(PROTOTYPE *Proto, uint16_t Dimension) {
640  switch (Proto->Style) {
641  case spherical:
642  return ((float) sqrt ((double) Proto->Variance.Spherical));
643  case elliptical:
644  return ((float)
645  sqrt ((double) Proto->Variance.Elliptical[Dimension]));
646  case mixed:
647  switch (Proto->Distrib[Dimension]) {
648  case normal:
649  return ((float)
650  sqrt ((double) Proto->Variance.Elliptical[Dimension]));
651  case uniform:
652  case D_random:
653  return (Proto->Variance.Elliptical[Dimension]);
654  case DISTRIBUTION_COUNT:
655  ASSERT_HOST(!"Distribution count not allowed!");
656  }
657  }
658  return 0.0f;
659 } // StandardDeviation
660 
661 
662 /*---------------------------------------------------------------------------
663  Private Code
664 ----------------------------------------------------------------------------*/
678 void CreateClusterTree(CLUSTERER *Clusterer) {
679  ClusteringContext context;
680  ClusterPair HeapEntry;
681  TEMPCLUSTER *PotentialCluster;
682 
683  // each sample and its nearest neighbor form a "potential" cluster
684  // save these in a heap with the "best" potential clusters on top
685  context.tree = Clusterer->KDTree;
686  context.candidates = (TEMPCLUSTER *)
687  Emalloc(Clusterer->NumberOfSamples * sizeof(TEMPCLUSTER));
688  context.next = 0;
689  context.heap = new ClusterHeap(Clusterer->NumberOfSamples);
690  KDWalk(context.tree, (void_proc)MakePotentialClusters, &context);
691 
692  // form potential clusters into actual clusters - always do "best" first
693  while (context.heap->Pop(&HeapEntry)) {
694  PotentialCluster = HeapEntry.data;
695 
696  // if main cluster of potential cluster is already in another cluster
697  // then we don't need to worry about it
698  if (PotentialCluster->Cluster->Clustered) {
699  continue;
700  }
701 
702  // if main cluster is not yet clustered, but its nearest neighbor is
703  // then we must find a new nearest neighbor
704  else if (PotentialCluster->Neighbor->Clustered) {
705  PotentialCluster->Neighbor =
706  FindNearestNeighbor(context.tree, PotentialCluster->Cluster,
707  &HeapEntry.key);
708  if (PotentialCluster->Neighbor != nullptr) {
709  context.heap->Push(&HeapEntry);
710  }
711  }
712 
713  // if neither cluster is already clustered, form permanent cluster
714  else {
715  PotentialCluster->Cluster =
716  MakeNewCluster(Clusterer, PotentialCluster);
717  PotentialCluster->Neighbor =
718  FindNearestNeighbor(context.tree, PotentialCluster->Cluster,
719  &HeapEntry.key);
720  if (PotentialCluster->Neighbor != nullptr) {
721  context.heap->Push(&HeapEntry);
722  }
723  }
724  }
725 
726  // the root node in the cluster tree is now the only node in the kd-tree
727  Clusterer->Root = (CLUSTER *) RootOf(Clusterer->KDTree);
728 
729  // free up the memory used by the K-D tree, heap, and temp clusters
730  FreeKDTree(context.tree);
731  Clusterer->KDTree = nullptr;
732  delete context.heap;
733  free(context.candidates);
734 } // CreateClusterTree
735 
746  CLUSTER *Cluster, int32_t Level) {
747  ClusterPair HeapEntry;
748  int next = context->next;
749  context->candidates[next].Cluster = Cluster;
750  HeapEntry.data = &(context->candidates[next]);
751  context->candidates[next].Neighbor =
752  FindNearestNeighbor(context->tree,
753  context->candidates[next].Cluster,
754  &HeapEntry.key);
755  if (context->candidates[next].Neighbor != nullptr) {
756  context->heap->Push(&HeapEntry);
757  context->next++;
758  }
759 } // MakePotentialClusters
760 
774 CLUSTER *
775 FindNearestNeighbor(KDTREE * Tree, CLUSTER * Cluster, float * Distance)
776 #define MAXNEIGHBORS 2
777 #define MAXDISTANCE FLT_MAX
778 {
779  CLUSTER *Neighbor[MAXNEIGHBORS];
780  float Dist[MAXNEIGHBORS];
781  int NumberOfNeighbors;
782  int32_t i;
783  CLUSTER *BestNeighbor;
784 
785  // find the 2 nearest neighbors of the cluster
787  &NumberOfNeighbors, (void **)Neighbor, Dist);
788 
789  // search for the nearest neighbor that is not the cluster itself
790  *Distance = MAXDISTANCE;
791  BestNeighbor = nullptr;
792  for (i = 0; i < NumberOfNeighbors; i++) {
793  if ((Dist[i] < *Distance) && (Neighbor[i] != Cluster)) {
794  *Distance = Dist[i];
795  BestNeighbor = Neighbor[i];
796  }
797  }
798  return BestNeighbor;
799 } // FindNearestNeighbor
800 
810 CLUSTER *MakeNewCluster(CLUSTERER *Clusterer, TEMPCLUSTER *TempCluster) {
811  CLUSTER *Cluster;
812 
813  // allocate the new cluster and initialize it
814  Cluster = (CLUSTER *) Emalloc(
815  sizeof(CLUSTER) + (Clusterer->SampleSize - 1) * sizeof(float));
816  Cluster->Clustered = FALSE;
817  Cluster->Prototype = FALSE;
818  Cluster->Left = TempCluster->Cluster;
819  Cluster->Right = TempCluster->Neighbor;
820  Cluster->CharID = -1;
821 
822  // mark the old clusters as "clustered" and delete them from the kd-tree
823  Cluster->Left->Clustered = TRUE;
824  Cluster->Right->Clustered = TRUE;
825  KDDelete(Clusterer->KDTree, Cluster->Left->Mean, Cluster->Left);
826  KDDelete(Clusterer->KDTree, Cluster->Right->Mean, Cluster->Right);
827 
828  // compute the mean and sample count for the new cluster
829  Cluster->SampleCount =
830  MergeClusters(Clusterer->SampleSize, Clusterer->ParamDesc,
831  Cluster->Left->SampleCount, Cluster->Right->SampleCount,
832  Cluster->Mean, Cluster->Left->Mean, Cluster->Right->Mean);
833 
834  // add the new cluster to the KD tree
835  KDStore(Clusterer->KDTree, Cluster->Mean, Cluster);
836  return Cluster;
837 } // MakeNewCluster
838 
852 int32_t MergeClusters(int16_t N,
853  PARAM_DESC ParamDesc[],
854  int32_t n1,
855  int32_t n2,
856  float m[],
857  float m1[], float m2[]) {
858  int32_t i, n;
859 
860  n = n1 + n2;
861  for (i = N; i > 0; i--, ParamDesc++, m++, m1++, m2++) {
862  if (ParamDesc->Circular) {
863  // if distance between means is greater than allowed
864  // reduce upper point by one "rotation" to compute mean
865  // then normalize the mean back into the accepted range
866  if ((*m2 - *m1) > ParamDesc->HalfRange) {
867  *m = (n1 * *m1 + n2 * (*m2 - ParamDesc->Range)) / n;
868  if (*m < ParamDesc->Min)
869  *m += ParamDesc->Range;
870  }
871  else if ((*m1 - *m2) > ParamDesc->HalfRange) {
872  *m = (n1 * (*m1 - ParamDesc->Range) + n2 * *m2) / n;
873  if (*m < ParamDesc->Min)
874  *m += ParamDesc->Range;
875  }
876  else
877  *m = (n1 * *m1 + n2 * *m2) / n;
878  }
879  else
880  *m = (n1 * *m1 + n2 * *m2) / n;
881  }
882  return n;
883 } // MergeClusters
884 
895  LIST ClusterStack = NIL_LIST;
896  CLUSTER *Cluster;
897  PROTOTYPE *Prototype;
898 
899  // use a stack to keep track of clusters waiting to be processed
900  // initially the only cluster on the stack is the root cluster
901  if (Clusterer->Root != nullptr)
902  ClusterStack = push (NIL_LIST, Clusterer->Root);
903 
904  // loop until we have analyzed all clusters which are potential prototypes
905  while (ClusterStack != NIL_LIST) {
906  // remove the next cluster to be analyzed from the stack
907  // try to make a prototype from the cluster
908  // if successful, put it on the proto list, else split the cluster
909  Cluster = (CLUSTER *) first_node (ClusterStack);
910  ClusterStack = pop (ClusterStack);
911  Prototype = MakePrototype(Clusterer, Config, Cluster);
912  if (Prototype != nullptr) {
913  Clusterer->ProtoList = push (Clusterer->ProtoList, Prototype);
914  }
915  else {
916  ClusterStack = push (ClusterStack, Cluster->Right);
917  ClusterStack = push (ClusterStack, Cluster->Left);
918  }
919  }
920 } // ComputePrototypes
921 
939  CLUSTER *Cluster) {
940  STATISTICS *Statistics;
941  PROTOTYPE *Proto;
942  BUCKETS *Buckets;
943 
944  // filter out clusters which contain samples from the same character
945  if (MultipleCharSamples (Clusterer, Cluster, Config->MaxIllegal))
946  return nullptr;
947 
948  // compute the covariance matrix and ranges for the cluster
949  Statistics =
950  ComputeStatistics(Clusterer->SampleSize, Clusterer->ParamDesc, Cluster);
951 
952  // check for degenerate clusters which need not be analyzed further
953  // note that the MinSamples test assumes that all clusters with multiple
954  // character samples have been removed (as above)
955  Proto = MakeDegenerateProto(
956  Clusterer->SampleSize, Cluster, Statistics, Config->ProtoStyle,
957  (int32_t) (Config->MinSamples * Clusterer->NumChar));
958  if (Proto != nullptr) {
959  FreeStatistics(Statistics);
960  return Proto;
961  }
962  // check to ensure that all dimensions are independent
963  if (!Independent(Clusterer->ParamDesc, Clusterer->SampleSize,
964  Statistics->CoVariance, Config->Independence)) {
965  FreeStatistics(Statistics);
966  return nullptr;
967  }
968 
969  if (HOTELLING && Config->ProtoStyle == elliptical) {
970  Proto = TestEllipticalProto(Clusterer, Config, Cluster, Statistics);
971  if (Proto != nullptr) {
972  FreeStatistics(Statistics);
973  return Proto;
974  }
975  }
976 
977  // create a histogram data structure used to evaluate distributions
978  Buckets = GetBuckets(Clusterer, normal, Cluster->SampleCount,
979  Config->Confidence);
980 
981  // create a prototype based on the statistics and test it
982  switch (Config->ProtoStyle) {
983  case spherical:
984  Proto = MakeSphericalProto(Clusterer, Cluster, Statistics, Buckets);
985  break;
986  case elliptical:
987  Proto = MakeEllipticalProto(Clusterer, Cluster, Statistics, Buckets);
988  break;
989  case mixed:
990  Proto = MakeMixedProto(Clusterer, Cluster, Statistics, Buckets,
991  Config->Confidence);
992  break;
993  case automatic:
994  Proto = MakeSphericalProto(Clusterer, Cluster, Statistics, Buckets);
995  if (Proto != nullptr)
996  break;
997  Proto = MakeEllipticalProto(Clusterer, Cluster, Statistics, Buckets);
998  if (Proto != nullptr)
999  break;
1000  Proto = MakeMixedProto(Clusterer, Cluster, Statistics, Buckets,
1001  Config->Confidence);
1002  break;
1003  }
1004  FreeStatistics(Statistics);
1005  return Proto;
1006 } // MakePrototype
1007 
1027 PROTOTYPE *MakeDegenerateProto( //this was MinSample
1028  uint16_t N,
1029  CLUSTER *Cluster,
1030  STATISTICS *Statistics,
1031  PROTOSTYLE Style,
1032  int32_t MinSamples) {
1033  PROTOTYPE *Proto = nullptr;
1034 
1035  if (MinSamples < MINSAMPLESNEEDED)
1036  MinSamples = MINSAMPLESNEEDED;
1037 
1038  if (Cluster->SampleCount < MinSamples) {
1039  switch (Style) {
1040  case spherical:
1041  Proto = NewSphericalProto (N, Cluster, Statistics);
1042  break;
1043  case elliptical:
1044  case automatic:
1045  Proto = NewEllipticalProto (N, Cluster, Statistics);
1046  break;
1047  case mixed:
1048  Proto = NewMixedProto (N, Cluster, Statistics);
1049  break;
1050  }
1051  Proto->Significant = FALSE;
1052  }
1053  return (Proto);
1054 } // MakeDegenerateProto
1055 
1071  CLUSTER *Cluster,
1072  STATISTICS *Statistics) {
1073  // Fraction of the number of samples used as a range around 1 within
1074  // which a cluster has the magic size that allows a boost to the
1075  // FTable by kFTableBoostMargin, thus allowing clusters near the
1076  // magic size (equal to the number of sample characters) to be more
1077  // likely to stay together.
1078  const double kMagicSampleMargin = 0.0625;
1079  const double kFTableBoostMargin = 2.0;
1080 
1081  int N = Clusterer->SampleSize;
1082  CLUSTER* Left = Cluster->Left;
1083  CLUSTER* Right = Cluster->Right;
1084  if (Left == nullptr || Right == nullptr)
1085  return nullptr;
1086  int TotalDims = Left->SampleCount + Right->SampleCount;
1087  if (TotalDims < N + 1 || TotalDims < 2)
1088  return nullptr;
1089  std::vector<float> Covariance(static_cast<size_t>(N) * N);
1090  std::vector<float> Inverse(static_cast<size_t>(N) * N);
1091  std::vector<float> Delta(N);
1092  // Compute a new covariance matrix that only uses essential features.
1093  for (int i = 0; i < N; ++i) {
1094  int row_offset = i * N;
1095  if (!Clusterer->ParamDesc[i].NonEssential) {
1096  for (int j = 0; j < N; ++j) {
1097  if (!Clusterer->ParamDesc[j].NonEssential)
1098  Covariance[j + row_offset] = Statistics->CoVariance[j + row_offset];
1099  else
1100  Covariance[j + row_offset] = 0.0f;
1101  }
1102  } else {
1103  for (int j = 0; j < N; ++j) {
1104  if (i == j)
1105  Covariance[j + row_offset] = 1.0f;
1106  else
1107  Covariance[j + row_offset] = 0.0f;
1108  }
1109  }
1110  }
1111  double err = InvertMatrix(&Covariance[0], N, &Inverse[0]);
1112  if (err > 1) {
1113  tprintf("Clustering error: Matrix inverse failed with error %g\n", err);
1114  }
1115  int EssentialN = 0;
1116  for (int dim = 0; dim < N; ++dim) {
1117  if (!Clusterer->ParamDesc[dim].NonEssential) {
1118  Delta[dim] = Left->Mean[dim] - Right->Mean[dim];
1119  ++EssentialN;
1120  } else {
1121  Delta[dim] = 0.0f;
1122  }
1123  }
1124  // Compute Hotelling's T-squared.
1125  double Tsq = 0.0;
1126  for (int x = 0; x < N; ++x) {
1127  double temp = 0.0;
1128  for (int y = 0; y < N; ++y) {
1129  temp += static_cast<double>(Inverse[y + N * x]) * Delta[y];
1130  }
1131  Tsq += Delta[x] * temp;
1132  }
1133  // Changed this function to match the formula in
1134  // Statistical Methods in Medical Research p 473
1135  // By Peter Armitage, Geoffrey Berry, J. N. S. Matthews.
1136  // Tsq *= Left->SampleCount * Right->SampleCount / TotalDims;
1137  double F = Tsq * (TotalDims - EssentialN - 1) / ((TotalDims - 2)*EssentialN);
1138  int Fx = EssentialN;
1139  if (Fx > FTABLE_X)
1140  Fx = FTABLE_X;
1141  --Fx;
1142  int Fy = TotalDims - EssentialN - 1;
1143  if (Fy > FTABLE_Y)
1144  Fy = FTABLE_Y;
1145  --Fy;
1146  double FTarget = FTable[Fy][Fx];
1147  if (Config->MagicSamples > 0 &&
1148  TotalDims >= Config->MagicSamples * (1.0 - kMagicSampleMargin) &&
1149  TotalDims <= Config->MagicSamples * (1.0 + kMagicSampleMargin)) {
1150  // Give magic-sized clusters a magic FTable boost.
1151  FTarget += kFTableBoostMargin;
1152  }
1153  if (F < FTarget) {
1154  return NewEllipticalProto (Clusterer->SampleSize, Cluster, Statistics);
1155  }
1156  return nullptr;
1157 }
1158 
1171  CLUSTER *Cluster,
1172  STATISTICS *Statistics,
1173  BUCKETS *Buckets) {
1174  PROTOTYPE *Proto = nullptr;
1175  int i;
1176 
1177  // check that each dimension is a normal distribution
1178  for (i = 0; i < Clusterer->SampleSize; i++) {
1179  if (Clusterer->ParamDesc[i].NonEssential)
1180  continue;
1181 
1182  FillBuckets (Buckets, Cluster, i, &(Clusterer->ParamDesc[i]),
1183  Cluster->Mean[i],
1184  sqrt ((double) (Statistics->AvgVariance)));
1185  if (!DistributionOK (Buckets))
1186  break;
1187  }
1188  // if all dimensions matched a normal distribution, make a proto
1189  if (i >= Clusterer->SampleSize)
1190  Proto = NewSphericalProto (Clusterer->SampleSize, Cluster, Statistics);
1191  return (Proto);
1192 } // MakeSphericalProto
1193 
1206  CLUSTER *Cluster,
1207  STATISTICS *Statistics,
1208  BUCKETS *Buckets) {
1209  PROTOTYPE *Proto = nullptr;
1210  int i;
1211 
1212  // check that each dimension is a normal distribution
1213  for (i = 0; i < Clusterer->SampleSize; i++) {
1214  if (Clusterer->ParamDesc[i].NonEssential)
1215  continue;
1216 
1217  FillBuckets (Buckets, Cluster, i, &(Clusterer->ParamDesc[i]),
1218  Cluster->Mean[i],
1219  sqrt ((double) Statistics->
1220  CoVariance[i * (Clusterer->SampleSize + 1)]));
1221  if (!DistributionOK (Buckets))
1222  break;
1223  }
1224  // if all dimensions matched a normal distribution, make a proto
1225  if (i >= Clusterer->SampleSize)
1226  Proto = NewEllipticalProto (Clusterer->SampleSize, Cluster, Statistics);
1227  return (Proto);
1228 } // MakeEllipticalProto
1229 
1246  CLUSTER *Cluster,
1247  STATISTICS *Statistics,
1248  BUCKETS *NormalBuckets,
1249  double Confidence) {
1250  PROTOTYPE *Proto;
1251  int i;
1252  BUCKETS *UniformBuckets = nullptr;
1253  BUCKETS *RandomBuckets = nullptr;
1254 
1255  // create a mixed proto to work on - initially assume all dimensions normal*/
1256  Proto = NewMixedProto (Clusterer->SampleSize, Cluster, Statistics);
1257 
1258  // find the proper distribution for each dimension
1259  for (i = 0; i < Clusterer->SampleSize; i++) {
1260  if (Clusterer->ParamDesc[i].NonEssential)
1261  continue;
1262 
1263  FillBuckets (NormalBuckets, Cluster, i, &(Clusterer->ParamDesc[i]),
1264  Proto->Mean[i],
1265  sqrt ((double) Proto->Variance.Elliptical[i]));
1266  if (DistributionOK (NormalBuckets))
1267  continue;
1268 
1269  if (RandomBuckets == nullptr)
1270  RandomBuckets =
1271  GetBuckets(Clusterer, D_random, Cluster->SampleCount, Confidence);
1272  MakeDimRandom (i, Proto, &(Clusterer->ParamDesc[i]));
1273  FillBuckets (RandomBuckets, Cluster, i, &(Clusterer->ParamDesc[i]),
1274  Proto->Mean[i], Proto->Variance.Elliptical[i]);
1275  if (DistributionOK (RandomBuckets))
1276  continue;
1277 
1278  if (UniformBuckets == nullptr)
1279  UniformBuckets =
1280  GetBuckets(Clusterer, uniform, Cluster->SampleCount, Confidence);
1281  MakeDimUniform(i, Proto, Statistics);
1282  FillBuckets (UniformBuckets, Cluster, i, &(Clusterer->ParamDesc[i]),
1283  Proto->Mean[i], Proto->Variance.Elliptical[i]);
1284  if (DistributionOK (UniformBuckets))
1285  continue;
1286  break;
1287  }
1288  // if any dimension failed to match a distribution, discard the proto
1289  if (i < Clusterer->SampleSize) {
1290  FreePrototype(Proto);
1291  Proto = nullptr;
1292  }
1293  return (Proto);
1294 } // MakeMixedProto
1295 
1304 void MakeDimRandom(uint16_t i, PROTOTYPE *Proto, PARAM_DESC *ParamDesc) {
1305  Proto->Distrib[i] = D_random;
1306  Proto->Mean[i] = ParamDesc->MidRange;
1307  Proto->Variance.Elliptical[i] = ParamDesc->HalfRange;
1308 
1309  // subtract out the previous magnitude of this dimension from the total
1310  Proto->TotalMagnitude /= Proto->Magnitude.Elliptical[i];
1311  Proto->Magnitude.Elliptical[i] = 1.0 / ParamDesc->Range;
1312  Proto->TotalMagnitude *= Proto->Magnitude.Elliptical[i];
1313  Proto->LogMagnitude = log ((double) Proto->TotalMagnitude);
1314 
1315  // note that the proto Weight is irrelevant for D_random protos
1316 } // MakeDimRandom
1317 
1326 void MakeDimUniform(uint16_t i, PROTOTYPE *Proto, STATISTICS *Statistics) {
1327  Proto->Distrib[i] = uniform;
1328  Proto->Mean[i] = Proto->Cluster->Mean[i] +
1329  (Statistics->Min[i] + Statistics->Max[i]) / 2;
1330  Proto->Variance.Elliptical[i] =
1331  (Statistics->Max[i] - Statistics->Min[i]) / 2;
1332  if (Proto->Variance.Elliptical[i] < MINVARIANCE)
1333  Proto->Variance.Elliptical[i] = MINVARIANCE;
1334 
1335  // subtract out the previous magnitude of this dimension from the total
1336  Proto->TotalMagnitude /= Proto->Magnitude.Elliptical[i];
1337  Proto->Magnitude.Elliptical[i] =
1338  1.0 / (2.0 * Proto->Variance.Elliptical[i]);
1339  Proto->TotalMagnitude *= Proto->Magnitude.Elliptical[i];
1340  Proto->LogMagnitude = log ((double) Proto->TotalMagnitude);
1341 
1342  // note that the proto Weight is irrelevant for uniform protos
1343 } // MakeDimUniform
1344 
/**
 * Computes the mean-relative statistics of a cluster: per-dimension
 * min/max deviations from the cluster mean, the full NxN covariance
 * matrix, and the geometric mean of the diagonal variances (AvgVariance).
 * Circular dimensions are wrapped into +/- HalfRange before accumulation.
 * The caller owns the returned STATISTICS and must release it with
 * FreeStatistics().
 *
 * @param N          number of dimensions per sample
 * @param ParamDesc  array of dimension descriptions
 * @param Cluster    cluster whose samples are to be analyzed
 * @return  pointer to new data structure containing statistics
 */
STATISTICS *
ComputeStatistics (int16_t N, PARAM_DESC ParamDesc[], CLUSTER * Cluster) {
  STATISTICS *Statistics;
  int i, j;
  float *CoVariance;
  float *Distance;
  LIST SearchState;
  SAMPLE *Sample;
  uint32_t SampleCountAdjustedForBias;

  // allocate memory to hold the statistics results
  Statistics = (STATISTICS *) Emalloc (sizeof (STATISTICS));
  Statistics->CoVariance = (float *)Emalloc(sizeof(float) * N * N);
  Statistics->Min = (float *) Emalloc (N * sizeof (float));
  Statistics->Max = (float *) Emalloc (N * sizeof (float));

  // allocate temporary memory to hold the sample to mean distances
  Distance = (float *) Emalloc (N * sizeof (float));

  // initialize the statistics
  Statistics->AvgVariance = 1.0;
  CoVariance = Statistics->CoVariance;
  for (i = 0; i < N; i++) {
    Statistics->Min[i] = 0.0;
    Statistics->Max[i] = 0.0;
    for (j = 0; j < N; j++, CoVariance++)
      *CoVariance = 0;
  }
  // find each sample in the cluster and merge it into the statistics
  InitSampleSearch(SearchState, Cluster);
  while ((Sample = NextSample (&SearchState)) != nullptr) {
    for (i = 0; i < N; i++) {
      Distance[i] = Sample->Mean[i] - Cluster->Mean[i];
      // wrap circular dimensions so the deviation lies in +/- HalfRange
      if (ParamDesc[i].Circular) {
        if (Distance[i] > ParamDesc[i].HalfRange)
          Distance[i] -= ParamDesc[i].Range;
        if (Distance[i] < -ParamDesc[i].HalfRange)
          Distance[i] += ParamDesc[i].Range;
      }
      if (Distance[i] < Statistics->Min[i])
        Statistics->Min[i] = Distance[i];
      if (Distance[i] > Statistics->Max[i])
        Statistics->Max[i] = Distance[i];
    }
    // accumulate the outer product Distance * Distance^T into CoVariance
    CoVariance = Statistics->CoVariance;
    for (i = 0; i < N; i++)
      for (j = 0; j < N; j++, CoVariance++)
        *CoVariance += Distance[i] * Distance[j];
  }
  // normalize the variances by the total number of samples
  // use SampleCount-1 instead of SampleCount to get an unbiased estimate
  // also compute the geometic mean of the diagonal variances
  // ensure that clusters with only 1 sample are handled correctly
  if (Cluster->SampleCount > 1)
    SampleCountAdjustedForBias = Cluster->SampleCount - 1;
  else
    SampleCountAdjustedForBias = 1;
  CoVariance = Statistics->CoVariance;
  for (i = 0; i < N; i++)
    for (j = 0; j < N; j++, CoVariance++) {
      *CoVariance /= SampleCountAdjustedForBias;
      if (j == i) {
        // clip diagonal variances to MINVARIANCE, then fold into the
        // running product used below for the geometric mean
        if (*CoVariance < MINVARIANCE)
          *CoVariance = MINVARIANCE;
        Statistics->AvgVariance *= *CoVariance;
      }
    }
  // Nth root of the product of the N diagonal variances = geometric mean
  Statistics->AvgVariance = (float)pow((double)Statistics->AvgVariance,
                                       1.0 / N);

  // release temporary memory and return
  free(Distance);
  return (Statistics);
} // ComputeStatistics
1433 
1446  CLUSTER *Cluster,
1447  STATISTICS *Statistics) {
1448  PROTOTYPE *Proto;
1449 
1450  Proto = NewSimpleProto (N, Cluster);
1451 
1452  Proto->Variance.Spherical = Statistics->AvgVariance;
1453  if (Proto->Variance.Spherical < MINVARIANCE)
1454  Proto->Variance.Spherical = MINVARIANCE;
1455 
1456  Proto->Magnitude.Spherical =
1457  1.0 / sqrt(2.0 * M_PI * Proto->Variance.Spherical);
1458  Proto->TotalMagnitude = (float)pow((double)Proto->Magnitude.Spherical,
1459  (double) N);
1460  Proto->Weight.Spherical = 1.0 / Proto->Variance.Spherical;
1461  Proto->LogMagnitude = log ((double) Proto->TotalMagnitude);
1462 
1463  return (Proto);
1464 } // NewSphericalProto
1465 
1477  CLUSTER *Cluster,
1478  STATISTICS *Statistics) {
1479  PROTOTYPE *Proto;
1480  float *CoVariance;
1481  int i;
1482 
1483  Proto = NewSimpleProto (N, Cluster);
1484  Proto->Variance.Elliptical = (float *) Emalloc (N * sizeof (float));
1485  Proto->Magnitude.Elliptical = (float *) Emalloc (N * sizeof (float));
1486  Proto->Weight.Elliptical = (float *) Emalloc (N * sizeof (float));
1487 
1488  CoVariance = Statistics->CoVariance;
1489  Proto->TotalMagnitude = 1.0;
1490  for (i = 0; i < N; i++, CoVariance += N + 1) {
1491  Proto->Variance.Elliptical[i] = *CoVariance;
1492  if (Proto->Variance.Elliptical[i] < MINVARIANCE)
1493  Proto->Variance.Elliptical[i] = MINVARIANCE;
1494 
1495  Proto->Magnitude.Elliptical[i] =
1496  1.0 / sqrt(2.0 * M_PI * Proto->Variance.Elliptical[i]);
1497  Proto->Weight.Elliptical[i] = 1.0 / Proto->Variance.Elliptical[i];
1498  Proto->TotalMagnitude *= Proto->Magnitude.Elliptical[i];
1499  }
1500  Proto->LogMagnitude = log ((double) Proto->TotalMagnitude);
1501  Proto->Style = elliptical;
1502  return (Proto);
1503 } // NewEllipticalProto
1504 
1518 PROTOTYPE *NewMixedProto(int16_t N, CLUSTER *Cluster, STATISTICS *Statistics) {
1519  PROTOTYPE *Proto;
1520  int i;
1521 
1522  Proto = NewEllipticalProto (N, Cluster, Statistics);
1523  Proto->Distrib = (DISTRIBUTION *) Emalloc (N * sizeof (DISTRIBUTION));
1524 
1525  for (i = 0; i < N; i++) {
1526  Proto->Distrib[i] = normal;
1527  }
1528  Proto->Style = mixed;
1529  return (Proto);
1530 } // NewMixedProto
1531 
1540 PROTOTYPE *NewSimpleProto(int16_t N, CLUSTER *Cluster) {
1541  PROTOTYPE *Proto;
1542  int i;
1543 
1544  Proto = (PROTOTYPE *) Emalloc (sizeof (PROTOTYPE));
1545  Proto->Mean = (float *) Emalloc (N * sizeof (float));
1546 
1547  for (i = 0; i < N; i++)
1548  Proto->Mean[i] = Cluster->Mean[i];
1549  Proto->Distrib = nullptr;
1550 
1551  Proto->Significant = TRUE;
1552  Proto->Merged = FALSE;
1553  Proto->Style = spherical;
1554  Proto->NumSamples = Cluster->SampleCount;
1555  Proto->Cluster = Cluster;
1556  Proto->Cluster->Prototype = TRUE;
1557  return (Proto);
1558 } // NewSimpleProto
1559 
1578 bool
1580  int16_t N, float* CoVariance, float Independence) {
1581  int i, j;
1582  float *VARii; // points to ith on-diagonal element
1583  float *VARjj; // points to jth on-diagonal element
1584  float CorrelationCoeff;
1585 
1586  VARii = CoVariance;
1587  for (i = 0; i < N; i++, VARii += N + 1) {
1588  if (ParamDesc[i].NonEssential)
1589  continue;
1590 
1591  VARjj = VARii + N + 1;
1592  CoVariance = VARii + 1;
1593  for (j = i + 1; j < N; j++, CoVariance++, VARjj += N + 1) {
1594  if (ParamDesc[j].NonEssential)
1595  continue;
1596 
1597  if ((*VARii == 0.0) || (*VARjj == 0.0))
1598  CorrelationCoeff = 0.0;
1599  else
1600  CorrelationCoeff =
1601  sqrt (sqrt (*CoVariance * *CoVariance / (*VARii * *VARjj)));
1602  if (CorrelationCoeff > Independence)
1603  return false;
1604  }
1605  }
1606  return true;
1607 } // Independent
1608 
1625  DISTRIBUTION Distribution,
1626  uint32_t SampleCount,
1627  double Confidence) {
1628  // Get an old bucket structure with the same number of buckets.
1629  uint16_t NumberOfBuckets = OptimumNumberOfBuckets(SampleCount);
1630  BUCKETS *Buckets =
1631  clusterer->bucket_cache[Distribution][NumberOfBuckets - MINBUCKETS];
1632 
1633  // If a matching bucket structure is not found, make one and save it.
1634  if (Buckets == nullptr) {
1635  Buckets = MakeBuckets(Distribution, SampleCount, Confidence);
1636  clusterer->bucket_cache[Distribution][NumberOfBuckets - MINBUCKETS] =
1637  Buckets;
1638  } else {
1639  // Just adjust the existing buckets.
1640  if (SampleCount != Buckets->SampleCount)
1641  AdjustBuckets(Buckets, SampleCount);
1642  if (Confidence != Buckets->Confidence) {
1643  Buckets->Confidence = Confidence;
1644  Buckets->ChiSquared = ComputeChiSquared(
1645  DegreesOfFreedom(Distribution, Buckets->NumberOfBuckets),
1646  Confidence);
1647  }
1648  InitBuckets(Buckets);
1649  }
1650  return Buckets;
1651 } // GetBuckets
1652 
1670  uint32_t SampleCount,
1671  double Confidence) {
1672  const DENSITYFUNC DensityFunction[] =
1674  int i, j;
1675  BUCKETS *Buckets;
1676  double BucketProbability;
1677  double NextBucketBoundary;
1678  double Probability;
1679  double ProbabilityDelta;
1680  double LastProbDensity;
1681  double ProbDensity;
1682  uint16_t CurrentBucket;
1683  bool Symmetrical;
1684 
1685  // allocate memory needed for data structure
1686  Buckets = static_cast<BUCKETS *>(Emalloc(sizeof(BUCKETS)));
1687  Buckets->NumberOfBuckets = OptimumNumberOfBuckets(SampleCount);
1688  Buckets->SampleCount = SampleCount;
1689  Buckets->Confidence = Confidence;
1690  Buckets->Count =
1691  static_cast<uint32_t *>(Emalloc(Buckets->NumberOfBuckets * sizeof(uint32_t)));
1692  Buckets->ExpectedCount = static_cast<float *>(
1693  Emalloc(Buckets->NumberOfBuckets * sizeof(float)));
1694 
1695  // initialize simple fields
1696  Buckets->Distribution = Distribution;
1697  for (i = 0; i < Buckets->NumberOfBuckets; i++) {
1698  Buckets->Count[i] = 0;
1699  Buckets->ExpectedCount[i] = 0.0;
1700  }
1701 
1702  // all currently defined distributions are symmetrical
1703  Symmetrical = true;
1704  Buckets->ChiSquared = ComputeChiSquared(
1705  DegreesOfFreedom(Distribution, Buckets->NumberOfBuckets), Confidence);
1706 
1707  if (Symmetrical) {
1708  // allocate buckets so that all have approx. equal probability
1709  BucketProbability = 1.0 / (double) (Buckets->NumberOfBuckets);
1710 
1711  // distribution is symmetric so fill in upper half then copy
1712  CurrentBucket = Buckets->NumberOfBuckets / 2;
1713  if (Odd (Buckets->NumberOfBuckets))
1714  NextBucketBoundary = BucketProbability / 2;
1715  else
1716  NextBucketBoundary = BucketProbability;
1717 
1718  Probability = 0.0;
1719  LastProbDensity =
1720  (*DensityFunction[(int) Distribution]) (BUCKETTABLESIZE / 2);
1721  for (i = BUCKETTABLESIZE / 2; i < BUCKETTABLESIZE; i++) {
1722  ProbDensity = (*DensityFunction[(int) Distribution]) (i + 1);
1723  ProbabilityDelta = Integral (LastProbDensity, ProbDensity, 1.0);
1724  Probability += ProbabilityDelta;
1725  if (Probability > NextBucketBoundary) {
1726  if (CurrentBucket < Buckets->NumberOfBuckets - 1)
1727  CurrentBucket++;
1728  NextBucketBoundary += BucketProbability;
1729  }
1730  Buckets->Bucket[i] = CurrentBucket;
1731  Buckets->ExpectedCount[CurrentBucket] +=
1732  (float) (ProbabilityDelta * SampleCount);
1733  LastProbDensity = ProbDensity;
1734  }
1735  // place any leftover probability into the last bucket
1736  Buckets->ExpectedCount[CurrentBucket] +=
1737  (float) ((0.5 - Probability) * SampleCount);
1738 
1739  // copy upper half of distribution to lower half
1740  for (i = 0, j = BUCKETTABLESIZE - 1; i < j; i++, j--)
1741  Buckets->Bucket[i] =
1742  Mirror(Buckets->Bucket[j], Buckets->NumberOfBuckets);
1743 
1744  // copy upper half of expected counts to lower half
1745  for (i = 0, j = Buckets->NumberOfBuckets - 1; i <= j; i++, j--)
1746  Buckets->ExpectedCount[i] += Buckets->ExpectedCount[j];
1747  }
1748  return Buckets;
1749 } // MakeBuckets
1750 
1764 uint16_t OptimumNumberOfBuckets(uint32_t SampleCount) {
1765  uint8_t Last, Next;
1766  float Slope;
1767 
1768  if (SampleCount < kCountTable[0])
1769  return kBucketsTable[0];
1770 
1771  for (Last = 0, Next = 1; Next < LOOKUPTABLESIZE; Last++, Next++) {
1772  if (SampleCount <= kCountTable[Next]) {
1773  Slope = (float) (kBucketsTable[Next] - kBucketsTable[Last]) /
1774  (float) (kCountTable[Next] - kCountTable[Last]);
1775  return ((uint16_t) (kBucketsTable[Last] +
1776  Slope * (SampleCount - kCountTable[Last])));
1777  }
1778  }
1779  return kBucketsTable[Last];
1780 } // OptimumNumberOfBuckets
1781 
/**
 * Returns the chi-squared value that has probability Alpha of being
 * exceeded, for the given (even, rounded up) degrees of freedom.  Values
 * are computed on demand via a numeric solve of ChiArea and memoized in
 * static per-degrees-of-freedom lists, so repeated lookups are cheap.
 * NOTE(review): relies on static state, so this is not thread-safe.
 *
 * @param DegreesOfFreedom  determines shape of distribution
 * @param Alpha             probability of right tail
 * @return  desired chi-squared value
 */
double
ComputeChiSquared (uint16_t DegreesOfFreedom, double Alpha)
#define CHIACCURACY 0.01
#define MINALPHA (1e-200)
{
  static LIST ChiWith[MAXDEGREESOFFREEDOM + 1];

  CHISTRUCT *OldChiSquared;
  CHISTRUCT SearchKey;

  // limit the minimum alpha that can be used - if alpha is too small
  // it may not be possible to compute chi-squared.
  Alpha = ClipToRange(Alpha, MINALPHA, 1.0);
  if (Odd (DegreesOfFreedom))
    DegreesOfFreedom++;

  /* find the list of chi-squared values which have already been computed
     for the specified number of degrees of freedom.  Search the list for
     the desired chi-squared. */
  SearchKey.Alpha = Alpha;
  OldChiSquared = (CHISTRUCT *) first_node (search (ChiWith[DegreesOfFreedom],
                                                    &SearchKey, AlphaMatch));

  if (OldChiSquared == nullptr) {
    // cache miss: solve ChiArea numerically for this alpha and memoize
    OldChiSquared = NewChiStruct (DegreesOfFreedom, Alpha);
    OldChiSquared->ChiSquared = Solve (ChiArea, OldChiSquared,
                                       (double) DegreesOfFreedom,
                                       (double) CHIACCURACY);
    ChiWith[DegreesOfFreedom] = push (ChiWith[DegreesOfFreedom],
                                      OldChiSquared);
  }
  else {
    // further optimization might move OldChiSquared to front of list
  }

  return (OldChiSquared->ChiSquared);

} // ComputeChiSquared
1836 
1850 double NormalDensity(int32_t x) {
1851  double Distance;
1852 
1853  Distance = x - kNormalMean;
1854  return kNormalMagnitude * exp(-0.5 * Distance * Distance / kNormalVariance);
1855 } // NormalDensity
1856 
1864 double UniformDensity(int32_t x) {
1865  static double UniformDistributionDensity = (double) 1.0 / BUCKETTABLESIZE;
1866 
1867  if ((x >= 0.0) && (x <= BUCKETTABLESIZE))
1868  return UniformDistributionDensity;
1869  else
1870  return (double) 0.0;
1871 } // UniformDensity
1872 
/**
 * Trapezoidal-rule integral over one interval of width Dx, given the
 * integrand's values f1 and f2 at the interval's endpoints.
 *
 * @param f1  value of function at x1
 * @param f2  value of function at x2
 * @param Dx  x2 - x1 (should always be positive)
 * @return  approximation of the integral of the function from x1 to x2
 */
double Integral(double f1, double f2, double Dx) {
  const double average = (f1 + f2) / 2.0;
  return average * Dx;
} // Integral
1884 
/**
 * Counts the cluster's samples along dimension Dim into the histogram's
 * observed Count array, using the bucket mapping appropriate for the
 * distribution type already configured in Buckets (normal, D_random, or
 * uniform).  A zero StdDev cannot be analyzed statistically, so a
 * pseudo-analysis is used in that case (see inline comment).
 *
 * @param Buckets    histogram buckets to be filled
 * @param Cluster    cluster whose samples are being analyzed
 * @param Dim        dimension of samples which is being analyzed
 * @param ParamDesc  description of the dimension
 * @param Mean       mean of the distribution along Dim
 * @param StdDev     standard deviation of the distribution along Dim
 */
void FillBuckets(BUCKETS *Buckets,
                 CLUSTER *Cluster,
                 uint16_t Dim,
                 PARAM_DESC *ParamDesc,
                 float Mean,
                 float StdDev) {
  uint16_t BucketID;
  int i;
  LIST SearchState;
  SAMPLE *Sample;

  // initialize the histogram bucket counts to 0
  for (i = 0; i < Buckets->NumberOfBuckets; i++)
    Buckets->Count[i] = 0;

  if (StdDev == 0.0) {
    /* if the standard deviation is zero, then we can't statistically
       analyze the cluster.  Use a pseudo-analysis: samples exactly on
       the mean are distributed evenly across all buckets.  Samples greater
       than the mean are placed in the last bucket; samples less than the
       mean are placed in the first bucket. */

    InitSampleSearch(SearchState, Cluster);
    i = 0;
    while ((Sample = NextSample (&SearchState)) != nullptr) {
      if (Sample->Mean[Dim] > Mean)
        BucketID = Buckets->NumberOfBuckets - 1;
      else if (Sample->Mean[Dim] < Mean)
        BucketID = 0;
      else
        BucketID = i;
      Buckets->Count[BucketID] += 1;
      // i is the round-robin index used for samples exactly on the mean
      i++;
      if (i >= Buckets->NumberOfBuckets)
        i = 0;
    }
  }
  else {
    // search for all samples in the cluster and add to histogram buckets
    InitSampleSearch(SearchState, Cluster);
    while ((Sample = NextSample (&SearchState)) != nullptr) {
      switch (Buckets->Distribution) {
        case normal:
          BucketID = NormalBucket (ParamDesc, Sample->Mean[Dim],
                                   Mean, StdDev);
          break;
        case D_random:
        case uniform:
          BucketID = UniformBucket (ParamDesc, Sample->Mean[Dim],
                                    Mean, StdDev);
          break;
        default:
          BucketID = 0;
      }
      // Bucket[] maps the fine-grained table index to an actual bucket
      Buckets->Count[Buckets->Bucket[BucketID]] += 1;
    }
  }
} // FillBuckets
1963 
1975 uint16_t NormalBucket(PARAM_DESC *ParamDesc,
1976  float x,
1977  float Mean,
1978  float StdDev) {
1979  float X;
1980 
1981  // wraparound circular parameters if necessary
1982  if (ParamDesc->Circular) {
1983  if (x - Mean > ParamDesc->HalfRange)
1984  x -= ParamDesc->Range;
1985  else if (x - Mean < -ParamDesc->HalfRange)
1986  x += ParamDesc->Range;
1987  }
1988 
1989  X = ((x - Mean) / StdDev) * kNormalStdDev + kNormalMean;
1990  if (X < 0)
1991  return 0;
1992  if (X > BUCKETTABLESIZE - 1)
1993  return ((uint16_t) (BUCKETTABLESIZE - 1));
1994  return (uint16_t) floor((double) X);
1995 } // NormalBucket
1996 
2008 uint16_t UniformBucket(PARAM_DESC *ParamDesc,
2009  float x,
2010  float Mean,
2011  float StdDev) {
2012  float X;
2013 
2014  // wraparound circular parameters if necessary
2015  if (ParamDesc->Circular) {
2016  if (x - Mean > ParamDesc->HalfRange)
2017  x -= ParamDesc->Range;
2018  else if (x - Mean < -ParamDesc->HalfRange)
2019  x += ParamDesc->Range;
2020  }
2021 
2022  X = ((x - Mean) / (2 * StdDev) * BUCKETTABLESIZE + BUCKETTABLESIZE / 2.0);
2023  if (X < 0)
2024  return 0;
2025  if (X > BUCKETTABLESIZE - 1)
2026  return (uint16_t) (BUCKETTABLESIZE - 1);
2027  return (uint16_t) floor((double) X);
2028 } // UniformBucket
2029 
2040 bool DistributionOK(BUCKETS* Buckets) {
2041  float FrequencyDifference;
2042  float TotalDifference;
2043  int i;
2044 
2045  // compute how well the histogram matches the expected histogram
2046  TotalDifference = 0.0;
2047  for (i = 0; i < Buckets->NumberOfBuckets; i++) {
2048  FrequencyDifference = Buckets->Count[i] - Buckets->ExpectedCount[i];
2049  TotalDifference += (FrequencyDifference * FrequencyDifference) /
2050  Buckets->ExpectedCount[i];
2051  }
2052 
2053  // test to see if the difference is more than expected
2054  if (TotalDifference > Buckets->ChiSquared)
2055  return false;
2056  else
2057  return true;
2058 } // DistributionOK
2059 
/**
 * Frees the memory owned by a STATISTICS structure (covariance matrix
 * and min/max arrays) and then the structure itself.
 *
 * @param Statistics  pointer to data structure to be freed
 */
void FreeStatistics(STATISTICS *Statistics) {
  free(Statistics->CoVariance);
  free(Statistics->Min);
  free(Statistics->Max);
  free(Statistics);
} // FreeStatistics
2072 
/**
 * Frees the memory used by the specified histogram data structure,
 * including its observed-count and expected-count arrays.
 *
 * @param buckets  histogram data structure to be freed
 */
void FreeBuckets(BUCKETS *buckets) {
  Efree(buckets->Count);
  Efree(buckets->ExpectedCount);
  Efree(buckets);
} // FreeBuckets
2083 
2093 void FreeCluster(CLUSTER *Cluster) {
2094  if (Cluster != nullptr) {
2095  FreeCluster (Cluster->Left);
2096  FreeCluster (Cluster->Right);
2097  free(Cluster);
2098  }
2099 } // FreeCluster
2100 
/**
 * Computes the number of degrees of freedom for a chi-squared test on a
 * histogram with the given number of buckets fitted to the given
 * distribution.  A per-distribution offset (indexed by Distribution) is
 * subtracted, and the result is rounded up to an even value.
 *
 * @param Distribution      distribution being tested for
 * @param HistogramBuckets  number of buckets in chi-square test
 * @return  the number of degrees of freedom
 */
uint16_t DegreesOfFreedom(DISTRIBUTION Distribution, uint16_t HistogramBuckets) {
  // degrees of freedom lost, indexed by DISTRIBUTION enum value
  static uint8_t DegreeOffsets[] = { 3, 3, 1 };

  uint16_t AdjustedNumBuckets;

  AdjustedNumBuckets = HistogramBuckets - DegreeOffsets[(int) Distribution];
  // round up to an even number of degrees of freedom
  if (Odd (AdjustedNumBuckets))
    AdjustedNumBuckets++;
  return (AdjustedNumBuckets);

} // DegreesOfFreedom
2124 
2133 int NumBucketsMatch(void *arg1, // BUCKETS *Histogram,
2134  void *arg2) { // uint16_t *DesiredNumberOfBuckets)
2135  BUCKETS *Histogram = (BUCKETS *) arg1;
2136  uint16_t *DesiredNumberOfBuckets = (uint16_t *) arg2;
2137 
2138  return (*DesiredNumberOfBuckets == Histogram->NumberOfBuckets);
2139 
2140 } // NumBucketsMatch
2141 
/**
 * List-search callback used to locate a specific list node: matches when
 * the node pointer itself (arg1) is identical to the search key (arg2).
 *
 * @param arg1  current list node being examined
 * @param arg2  pointer value being searched for
 * @return  TRUE if the pointers are identical
 */
int ListEntryMatch(void *arg1,   // ListNode
                   void *arg2) {  // Key
  return arg1 == arg2;
} // ListEntryMatch
2153 
2162 void AdjustBuckets(BUCKETS *Buckets, uint32_t NewSampleCount) {
2163  int i;
2164  double AdjustFactor;
2165 
2166  AdjustFactor = (((double) NewSampleCount) /
2167  ((double) Buckets->SampleCount));
2168 
2169  for (i = 0; i < Buckets->NumberOfBuckets; i++) {
2170  Buckets->ExpectedCount[i] *= AdjustFactor;
2171  }
2172 
2173  Buckets->SampleCount = NewSampleCount;
2174 
2175 } // AdjustBuckets
2176 
2183 void InitBuckets(BUCKETS *Buckets) {
2184  int i;
2185 
2186  for (i = 0; i < Buckets->NumberOfBuckets; i++) {
2187  Buckets->Count[i] = 0;
2188  }
2189 
2190 } // InitBuckets
2191 
2204 int AlphaMatch(void *arg1, //CHISTRUCT *ChiStruct,
2205  void *arg2) { //CHISTRUCT *SearchKey)
2206  CHISTRUCT *ChiStruct = (CHISTRUCT *) arg1;
2207  CHISTRUCT *SearchKey = (CHISTRUCT *) arg2;
2208 
2209  return (ChiStruct->Alpha == SearchKey->Alpha);
2210 
2211 } // AlphaMatch
2212 
2222 CHISTRUCT *NewChiStruct(uint16_t DegreesOfFreedom, double Alpha) {
2224 
2225  NewChiStruct = (CHISTRUCT *) Emalloc (sizeof (CHISTRUCT));
2227  NewChiStruct->Alpha = Alpha;
2228  return (NewChiStruct);
2229 
2230 } // NewChiStruct
2231 
/**
 * Generic root finder: searches for an x such that
 * Function(FunctionParams, x) == 0, using a Newton-style iteration with a
 * numerically estimated slope.  Iteration stops when the bracketing
 * positive-side and negative-side guesses are within Accuracy of each
 * other.  NOTE(review): there is no iteration limit, so a pathological
 * function could iterate for a very long time.
 *
 * @param Function        function whose zero is to be found
 * @param FunctionParams  opaque params passed through to Function
 * @param InitialGuess    point to start solution search at
 * @param Accuracy        maximum allowed error in the solution
 * @return  an estimate of the solution point
 */
double
Solve (SOLVEFUNC Function,
void *FunctionParams, double InitialGuess, double Accuracy)
#define INITIALDELTA 0.1
#define DELTARATIO 0.1
{
  double x;
  double f;
  double Slope;
  double Delta;
  double NewDelta;
  double xDelta;
  double LastPosX, LastNegX;

  x = InitialGuess;
  Delta = INITIALDELTA;
  // bracketing bounds start at +/- infinity (effectively)
  LastPosX = FLT_MAX;
  LastNegX = -FLT_MAX;
  f = (*Function) ((CHISTRUCT *) FunctionParams, x);
  while (Abs (LastPosX - LastNegX) > Accuracy) {
    // keep track of outer bounds of current estimate
    if (f < 0)
      LastNegX = x;
    else
      LastPosX = x;

    // compute the approx. slope of f(x) at the current point
    Slope =
      ((*Function) ((CHISTRUCT *) FunctionParams, x + Delta) - f) / Delta;

    // compute the next solution guess (Newton step)
    xDelta = f / Slope;
    x -= xDelta;

    // reduce the delta used for computing slope to be a fraction of
    // the amount moved to get to the new guess
    NewDelta = Abs (xDelta) * DELTARATIO;
    if (NewDelta < Delta)
      Delta = NewDelta;

    // compute the value of the function at the new guess
    f = (*Function) ((CHISTRUCT *) FunctionParams, x);
  }
  return (x);

} // Solve
2291 
2310 double ChiArea(CHISTRUCT *ChiParams, double x) {
2311  int i, N;
2312  double SeriesTotal;
2313  double Denominator;
2314  double PowerOfx;
2315 
2316  N = ChiParams->DegreesOfFreedom / 2 - 1;
2317  SeriesTotal = 1;
2318  Denominator = 1;
2319  PowerOfx = 1;
2320  for (i = 1; i <= N; i++) {
2321  Denominator *= 2 * i;
2322  PowerOfx *= x;
2323  SeriesTotal += PowerOfx / Denominator;
2324  }
2325  return ((SeriesTotal * exp (-0.5 * x)) - ChiParams->Alpha);
2326 
2327 } // ChiArea
2328 
2352 bool
2354  CLUSTER* Cluster, float MaxIllegal)
2355 #define ILLEGAL_CHAR 2
2356 {
2357  static BOOL8 *CharFlags = nullptr;
2358  static int32_t NumFlags = 0;
2359  int i;
2360  LIST SearchState;
2361  SAMPLE *Sample;
2362  int32_t CharID;
2363  int32_t NumCharInCluster;
2364  int32_t NumIllegalInCluster;
2365  float PercentIllegal;
2366 
2367  // initial estimate assumes that no illegal chars exist in the cluster
2368  NumCharInCluster = Cluster->SampleCount;
2369  NumIllegalInCluster = 0;
2370 
2371  if (Clusterer->NumChar > NumFlags) {
2372  free(CharFlags);
2373  NumFlags = Clusterer->NumChar;
2374  CharFlags = (BOOL8 *) Emalloc (NumFlags * sizeof (BOOL8));
2375  }
2376 
2377  for (i = 0; i < NumFlags; i++)
2378  CharFlags[i] = FALSE;
2379 
2380  // find each sample in the cluster and check if we have seen it before
2381  InitSampleSearch(SearchState, Cluster);
2382  while ((Sample = NextSample (&SearchState)) != nullptr) {
2383  CharID = Sample->CharID;
2384  if (CharFlags[CharID] == FALSE) {
2385  CharFlags[CharID] = TRUE;
2386  }
2387  else {
2388  if (CharFlags[CharID] == TRUE) {
2389  NumIllegalInCluster++;
2390  CharFlags[CharID] = ILLEGAL_CHAR;
2391  }
2392  NumCharInCluster--;
2393  PercentIllegal = (float) NumIllegalInCluster / NumCharInCluster;
2394  if (PercentIllegal > MaxIllegal) {
2395  destroy(SearchState);
2396  return true;
2397  }
2398  }
2399  }
2400  return false;
2401 
2402 } // MultipleCharSamples
2403 
/**
 * Computes the inverse of the size x size matrix `input` (row-major) by
 * LU decomposition with partial pivoting, writing the result to `inv`.
 *
 * @param input  row-major array of size*size floats to invert
 * @param size   number of rows/columns in the matrix
 * @param inv    receives the computed inverse (row-major, size*size)
 * @return  the sum of |off-diagonal| entries of input * inv — a measure
 *          of how far the product is from the identity; a large value
 *          indicates a (near-)singular input.
 */
double InvertMatrix(const float* input, int size, float* inv) {
  // Allocate memory for the 2D arrays.
  GENERIC_2D_ARRAY<double> U(size, size, 0.0);
  GENERIC_2D_ARRAY<double> U_inv(size, size, 0.0);
  GENERIC_2D_ARRAY<double> L(size, size, 0.0);

  // Initialize the working matrices. U starts as input, L as I and U_inv as O.
  int row;
  int col;
  for (row = 0; row < size; row++) {
    for (col = 0; col < size; col++) {
      U[row][col] = input[row*size + col];
      L[row][col] = row == col ? 1.0 : 0.0;
      U_inv[row][col] = 0.0;
    }
  }

  // Compute forward matrix by inversion by LU decomposition of input.
  for (col = 0; col < size; ++col) {
    // Find best pivot: the largest |value| in this column at or below the
    // diagonal, for numerical stability.
    int best_row = 0;
    double best_pivot = -1.0;
    for (row = col; row < size; ++row) {
      if (Abs(U[row][col]) > best_pivot) {
        best_pivot = Abs(U[row][col]);
        best_row = row;
      }
    }
    // Exchange pivot rows.
    if (best_row != col) {
      for (int k = 0; k < size; ++k) {
        double tmp = U[best_row][k];
        U[best_row][k] = U[col][k];
        U[col][k] = tmp;
        tmp = L[best_row][k];
        L[best_row][k] = L[col][k];
        L[col][k] = tmp;
      }
    }
    // Now do the pivot itself: eliminate entries below the diagonal,
    // mirroring every row operation in L.
    for (row = col + 1; row < size; ++row) {
      double ratio = -U[row][col] / U[col][col];
      for (int j = col; j < size; ++j) {
        U[row][j] += U[col][j] * ratio;
      }
      for (int k = 0; k < size; ++k) {
        L[row][k] += L[col][k] * ratio;
      }
    }
  }
  // Next invert U, which is now upper-triangular, by back-substitution
  // one column at a time.
  for (col = 0; col < size; ++col) {
    U_inv[col][col] = 1.0 / U[col][col];
    for (row = col - 1; row >= 0; --row) {
      double total = 0.0;
      for (int k = col; k > row; --k) {
        total += U[row][k] * U_inv[k][col];
      }
      U_inv[row][col] = -total / U[row][row];
    }
  }
  // Now the answer is U_inv.L.
  for (row = 0; row < size; row++) {
    for (col = 0; col < size; col++) {
      double sum = 0.0;
      for (int k = row; k < size; ++k) {
        sum += U_inv[row][k] * L[k][col];
      }
      inv[row*size + col] = sum;
    }
  }
  // Check matrix product: accumulate the |off-diagonal| entries of
  // input * inv as the returned error estimate.
  double error_sum = 0.0;
  for (row = 0; row < size; row++) {
    for (col = 0; col < size; col++) {
      double sum = 0.0;
      for (int k = 0; k < size; ++k) {
        sum += static_cast<double>(input[row * size + k]) * inv[k * size + col];
      }
      if (row != col) {
        error_sum += Abs(sum);
      }
    }
  }
  return error_sum;
}
ClusterHeap * heap
Definition: cluster.cpp:198
BUCKETS * MakeBuckets(DISTRIBUTION Distribution, uint32_t SampleCount, double Confidence)
Definition: cluster.cpp:1669
double Integral(double f1, double f2, double Dx)
Definition: cluster.cpp:1881
#define CHIACCURACY
float MidRange
Definition: ocrfeatures.h:50
PROTOTYPE * NewSphericalProto(uint16_t N, CLUSTER *Cluster, STATISTICS *Statistics)
Definition: cluster.cpp:1445
PROTOTYPE * MakeDegenerateProto(uint16_t N, CLUSTER *Cluster, STATISTICS *Statistics, PROTOSTYLE Style, int32_t MinSamples)
Definition: cluster.cpp:1027
int8_t Circular
Definition: ocrfeatures.h:44
float * Min
Definition: cluster.cpp:175
PROTOTYPE * MakeEllipticalProto(CLUSTERER *Clusterer, CLUSTER *Cluster, STATISTICS *Statistics, BUCKETS *Buckets)
Definition: cluster.cpp:1205
KDTREE * MakeKDTree(int16_t KeySize, const PARAM_DESC KeyDesc[])
Definition: kdtree.cpp:181
double ChiSquared
Definition: cluster.cpp:183
bool MultipleCharSamples(CLUSTERER *Clusterer, CLUSTER *Cluster, float MaxIllegal)
Definition: cluster.cpp:2353
void MakeDimRandom(uint16_t i, PROTOTYPE *Proto, PARAM_DESC *ParamDesc)
Definition: cluster.cpp:1304
CLUSTERCONFIG Config
#define MINSAMPLESNEEDED
Definition: cluster.cpp:153
float * Mean
Definition: cluster.h:78
float HalfRange
Definition: ocrfeatures.h:49
#define TRUE
Definition: capi.h:51
BUCKETS * GetBuckets(CLUSTERER *clusterer, DISTRIBUTION Distribution, uint32_t SampleCount, double Confidence)
Definition: cluster.cpp:1624
#define FTABLE_X
Definition: cluster.cpp:32
double Alpha
Definition: cluster.cpp:192
Definition: cluster.h:32
double Solve(SOLVEFUNC Function, void *FunctionParams, double InitialGuess, double Accuracy)
Definition: cluster.cpp:2246
float Min
Definition: ocrfeatures.h:46
const double FTable[FTABLE_Y][FTABLE_X]
Definition: cluster.cpp:36
float MinSamples
Definition: cluster.h:50
double(* SOLVEFUNC)(CHISTRUCT *, double)
Definition: cluster.cpp:205
int ListEntryMatch(void *arg1, void *arg2)
Definition: cluster.cpp:2148
void FreeKDTree(KDTREE *Tree)
Definition: kdtree.cpp:333
float StandardDeviation(PROTOTYPE *Proto, uint16_t Dimension)
Definition: cluster.cpp:639
uint16_t Bucket[BUCKETTABLESIZE]
Definition: cluster.cpp:185
PARAM_DESC * ParamDesc
Definition: cluster.h:88
void FreeStatistics(STATISTICS *Statistics)
Definition: cluster.cpp:2066
void ComputePrototypes(CLUSTERER *Clusterer, CLUSTERCONFIG *Config)
Definition: cluster.cpp:894
#define MAXDISTANCE
#define INITIALDELTA
uint32_t SampleCount
Definition: cluster.cpp:181
struct sample * Left
Definition: cluster.h:36
void MakeDimUniform(uint16_t i, PROTOTYPE *Proto, STATISTICS *Statistics)
Definition: cluster.cpp:1326
void * Emalloc(int Size)
Definition: emalloc.cpp:31
int32_t CharID
Definition: cluster.h:38
unsigned Prototype
Definition: cluster.h:34
void KDStore(KDTREE *Tree, float *Key, void *Data)
Definition: kdtree.cpp:213
void CreateClusterTree(CLUSTERER *Clusterer)
Definition: cluster.cpp:678
uint16_t OptimumNumberOfBuckets(uint32_t SampleCount)
Definition: cluster.cpp:1764
LIST push(LIST list, void *element)
Definition: oldlist.cpp:283
void Efree(void *ptr)
Definition: emalloc.cpp:45
void FreeBuckets(BUCKETS *Buckets)
Definition: cluster.cpp:2078
bool Independent(PARAM_DESC *ParamDesc, int16_t N, float *CoVariance, float Independence)
Definition: cluster.cpp:1579
BUCKETS * bucket_cache[DISTRIBUTION_COUNT][MAXBUCKETS+1 - MINBUCKETS]
Definition: cluster.h:95
DISTRIBUTION Distribution
Definition: cluster.cpp:180
float TotalMagnitude
Definition: cluster.h:79
#define BUCKETTABLESIZE
Definition: cluster.cpp:161
PROTOTYPE * MakeSphericalProto(CLUSTERER *Clusterer, CLUSTER *Cluster, STATISTICS *Statistics, BUCKETS *Buckets)
Definition: cluster.cpp:1170
PROTOSTYLE ProtoStyle
Definition: cluster.h:49
unsigned Merged
Definition: cluster.h:69
KDTREE * KDTree
Definition: cluster.h:90
LIST destroy(LIST list)
Definition: oldlist.cpp:170
float Spherical
Definition: cluster.h:63
SAMPLE * MakeSample(CLUSTERER *Clusterer, const float *Feature, int32_t CharID)
Definition: cluster.cpp:452
float * Elliptical
Definition: cluster.h:64
LIST pop(LIST list)
Definition: oldlist.cpp:266
float * Max
Definition: cluster.cpp:176
void InitBuckets(BUCKETS *Buckets)
Definition: cluster.cpp:2183
#define Odd(N)
Definition: cluster.cpp:207
STATISTICS * ComputeStatistics(int16_t N, PARAM_DESC ParamDesc[], CLUSTER *Cluster)
Definition: cluster.cpp:1360
PROTOTYPE * NewSimpleProto(int16_t N, CLUSTER *Cluster)
Definition: cluster.cpp:1540
uint16_t NumberOfBuckets
Definition: cluster.cpp:184
FLOATUNION Weight
Definition: cluster.h:83
Definition: kdtree.h:47
#define DELTARATIO
void AdjustBuckets(BUCKETS *Buckets, uint32_t NewSampleCount)
Definition: cluster.cpp:2162
unsigned SampleCount
Definition: cluster.h:35
PROTOSTYLE
Definition: cluster.h:44
CHISTRUCT * NewChiStruct(uint16_t DegreesOfFreedom, double Alpha)
Definition: cluster.cpp:2222
void FreePrototype(void *arg)
Definition: cluster.cpp:575
uint16_t DegreesOfFreedom
Definition: cluster.cpp:191
PROTOTYPE * MakeMixedProto(CLUSTERER *Clusterer, CLUSTER *Cluster, STATISTICS *Statistics, BUCKETS *NormalBuckets, double Confidence)
Definition: cluster.cpp:1245
float MaxIllegal
Definition: cluster.h:51
double ChiArea(CHISTRUCT *ChiParams, double x)
Definition: cluster.cpp:2310
#define MAXDEGREESOFFREEDOM
Definition: cluster.cpp:230
uint32_t * Count
Definition: cluster.cpp:186
DISTRIBUTION * Distrib
Definition: cluster.h:77
void FreeProtoList(LIST *ProtoList)
Definition: cluster.cpp:563
void destroy_nodes(LIST list, void_dest destructor)
Definition: oldlist.cpp:186
void MakePotentialClusters(ClusteringContext *context, CLUSTER *Cluster, int32_t Level)
Definition: cluster.cpp:745
#define RootOf(T)
Definition: kdtree.h:56
#define MAXBUCKETS
Definition: cluster.h:27
unsigned Style
Definition: cluster.h:74
#define MINVARIANCE
Definition: cluster.cpp:143
void KDDelete(KDTREE *Tree, float Key[], void *Data)
Definition: kdtree.cpp:254
float Mean[1]
Definition: cluster.h:39
int NumBucketsMatch(void *arg1, void *arg2)
Definition: cluster.cpp:2133
#define FALSE
Definition: capi.h:52
float Range
Definition: ocrfeatures.h:48
unsigned Significant
Definition: cluster.h:68
LIST ProtoList
Definition: cluster.h:92
void FreeCluster(CLUSTER *Cluster)
Definition: cluster.cpp:2093
LIST ClusterSamples(CLUSTERER *Clusterer, CLUSTERCONFIG *Config)
Definition: cluster.cpp:506
CLUSTER * MakeNewCluster(CLUSTERER *Clusterer, TEMPCLUSTER *TempCluster)
Definition: cluster.cpp:810
#define NORMALEXTENT
Definition: cluster.cpp:162
double ChiSquared
Definition: cluster.cpp:193
int MagicSamples
Definition: cluster.h:55
CLUSTER * Root
Definition: cluster.h:91
LIST search(LIST list, void *key, int_compare is_equal)
Definition: oldlist.cpp:366
float * CoVariance
Definition: cluster.cpp:174
int32_t MergeClusters(int16_t N, PARAM_DESC ParamDesc[], int32_t n1, int32_t n2, float m[], float m1[], float m2[])
Definition: cluster.cpp:852
double Confidence
Definition: cluster.h:54
#define FTABLE_Y
Definition: cluster.cpp:33
unsigned char BOOL8
Definition: host.h:34
DLLSYM void tprintf(const char *format,...)
Definition: tprintf.cpp:37
void KDNearestNeighborSearch(KDTREE *Tree, float Query[], int QuerySize, float MaxDistance, int *NumberOfResults, void **NBuffer, float DBuffer[])
Definition: kdtree.cpp:306
void Push(Pair *entry)
Definition: genericheap.h:95
PROTOTYPE * TestEllipticalProto(CLUSTERER *Clusterer, CLUSTERCONFIG *Config, CLUSTER *Cluster, STATISTICS *Statistics)
Definition: cluster.cpp:1069
FLOATUNION Magnitude
Definition: cluster.h:82
float Independence
Definition: cluster.h:53
Definition: cluster.h:59
PROTOTYPE * NewEllipticalProto(int16_t N, CLUSTER *Cluster, STATISTICS *Statistics)
Definition: cluster.cpp:1476
float Mean(PROTOTYPE *Proto, uint16_t Dimension)
Definition: cluster.cpp:628
int8_t NonEssential
Definition: ocrfeatures.h:45
#define ILLEGAL_CHAR
double(* DENSITYFUNC)(int32_t)
Definition: cluster.cpp:204
int32_t NumChar
Definition: cluster.h:93
#define InitSampleSearch(S, C)
Definition: cluster.h:105
PROTOTYPE * MakePrototype(CLUSTERER *Clusterer, CLUSTERCONFIG *Config, CLUSTER *Cluster)
Definition: cluster.cpp:937
uint16_t UniformBucket(PARAM_DESC *ParamDesc, float x, float Mean, float StdDev)
Definition: cluster.cpp:2008
#define first_node(l)
Definition: oldlist.h:141
#define NIL_LIST
Definition: oldlist.h:127
tesseract::GenericHeap< ClusterPair > ClusterHeap
Definition: cluster.cpp:170
CLUSTER * Cluster
Definition: cluster.h:76
void FillBuckets(BUCKETS *Buckets, CLUSTER *Cluster, uint16_t Dim, PARAM_DESC *ParamDesc, float Mean, float StdDev)
Definition: cluster.cpp:1905
double ComputeChiSquared(uint16_t DegreesOfFreedom, double Alpha)
Definition: cluster.cpp:1799
#define HOTELLING
Definition: cluster.cpp:31
DISTRIBUTION
Definition: cluster.h:58
struct sample * Right
Definition: cluster.h:37
double UniformDensity(int32_t x)
Definition: cluster.cpp:1864
#define Mirror(N, R)
Definition: cluster.cpp:208
float * ExpectedCount
Definition: cluster.cpp:187
void FreeClusterer(CLUSTERER *Clusterer)
Definition: cluster.cpp:538
TEMPCLUSTER * candidates
Definition: cluster.cpp:199
#define MINSAMPLES
Definition: cluster.cpp:152
#define iterate(l)
Definition: oldlist.h:161
unsigned NumSamples
Definition: cluster.h:75
uint16_t NormalBucket(PARAM_DESC *ParamDesc, float x, float Mean, float StdDev)
Definition: cluster.cpp:1975
unsigned Clustered
Definition: cluster.h:33
CLUSTER * FindNearestNeighbor(KDTREE *Tree, CLUSTER *Cluster, float *Distance)
Definition: cluster.cpp:775
void(* void_proc)(...)
Definition: cutil.h:30
int AlphaMatch(void *arg1, void *arg2)
Definition: cluster.cpp:2204
float LogMagnitude
Definition: cluster.h:80
#define MINBUCKETS
Definition: cluster.h:26
uint16_t DegreesOfFreedom(DISTRIBUTION Distribution, uint16_t HistogramBuckets)
Definition: cluster.cpp:2113
int32_t NumberOfSamples
Definition: cluster.h:89
#define MINALPHA
float AvgVariance
Definition: cluster.cpp:173
#define Abs(N)
Definition: cluster.cpp:209
double NormalDensity(int32_t x)
Definition: cluster.cpp:1850
float Max
Definition: ocrfeatures.h:47
T ClipToRange(const T &x, const T &lower_bound, const T &upper_bound)
Definition: helpers.h:111
#define LOOKUPTABLESIZE
Definition: cluster.cpp:229
int16_t SampleSize
Definition: cluster.h:87
bool DistributionOK(BUCKETS *Buckets)
Definition: cluster.cpp:2040
void KDWalk(KDTREE *Tree, void_proc action, void *context)
Definition: kdtree.cpp:316
#define SqrtOf2Pi
Definition: cluster.cpp:219
double InvertMatrix(const float *input, int size, float *inv)
Definition: cluster.cpp:2409
#define MAXNEIGHBORS
CLUSTER * NextSample(LIST *SearchState)
Definition: cluster.cpp:606
FLOATUNION Variance
Definition: cluster.h:81
bool Pop(Pair *entry)
Definition: genericheap.h:118
CLUSTERER * MakeClusterer(int16_t SampleSize, const PARAM_DESC ParamDesc[])
Definition: cluster.cpp:399
CLUSTER * Cluster
Definition: cluster.cpp:165
CLUSTER * Neighbor
Definition: cluster.cpp:166
#define ASSERT_HOST(x)
Definition: errcode.h:84
PROTOTYPE * NewMixedProto(int16_t N, CLUSTER *Cluster, STATISTICS *Statistics)
Definition: cluster.cpp:1518
double Confidence
Definition: cluster.cpp:182
Definition: cluster.h:45