KSquare Utilities
Classifier2.cpp
Go to the documentation of this file.
1 #include "FirstIncludes.h"
2 
3 //*******************************************************************
4 //* Classifier2 *
5 //*******************************************************************
6 
7 #include <ctype.h>
8 #include <stdio.h>
9 
10 #include <fstream>
11 #include <iomanip>
12 #include <iostream>
13 #include <string>
14 #include <vector>
15 
16 #include "MemoryDebug.h"
17 
18 
19 #ifdef WIN32
20 #include <windows.h>
21 #else
22 #endif
23 
24 using namespace std;
25 
26 
27 #include "KKBaseTypes.h"
28 #include "KKException.h"
29 #include "RunLog.h"
30 using namespace KKB;
31 
32 
33 #include "Classifier2.h"
34 #include "ClassProb.h"
35 #include "MLClass.h"
37 #include "TrainingProcess2.h"
38 using namespace KKMLL;
39 
40 
41 
43  RunLog& _log
44  ):
45 
46  abort (false),
47  classifierClassIndex (),
48  classClassifierIndex (),
49  featuresAlreadyNormalized (false),
50  mlClasses (NULL),
51  log (_log),
52  subClassifiers (NULL),
53  configRootName (),
54  trainedModel (NULL),
55  trainedModelOldSVM (NULL),
56  trainedModelSVMModel (NULL),
57  noiseMLClass (NULL),
58  trainingProcess (_trainer),
59  unKnownMLClass (NULL)
60 {
61  if (!_trainer)
62  {
63  log.Level (-1) << endl
64  << "Classifier2::Classifier2 ***ERROR*** (_trainer == NULL)" << endl
65  << endl;
66  throw KKException ("Classifier2::Classifier2 ***ERROR*** (_trainer == NULL)");
67  }
68 
69  if (_trainer->Abort ())
70  {
71  log.Level (-1) << endl
72  << "Classifier2::Classifier2 ***ERROR*** '_trainer' is invalid." << endl
73  << endl;
74  throw KKException ("Classifier2::Classifier2 ***ERROR*** '_trainer' is invalid.");
75  }
76 
77  if (trainingProcess->Config () != NULL)
78  configRootName = trainingProcess->Config ()->ConfigRootName ();
79 
80  featuresAlreadyNormalized = trainingProcess->FeaturesAlreadyNormalized ();
81  mlClasses = new MLClassList (*(_trainer->MLClasses ()));
82  trainedModel = _trainer->TrainedModel ();
83  if (trainedModel == NULL)
84  {
85  KKStr errMsg = "Classifier2::Classifier2 ***ERROR*** (trainedModel == NULL).";
86  log.Level (-1) << endl << endl << errMsg << endl << endl;
87  throw KKException (errMsg);
88  }
89 
90  if (!trainedModel->ValidModel ())
91  {
92  KKStr errMsg = "Classifier2::Classifier2 ***ERROR*** trainedModel is not valid.";
93  log.Level (-1) << endl << endl << errMsg << endl << endl;
94  throw KKException (errMsg);
95  }
96 
97  log.Level (20) << "Classifier2::Classifier2" << endl;
98  noiseMLClass = mlClasses->GetNoiseClass ();
99  unKnownMLClass = mlClasses->GetUnKnownClass ();
100 
101  if (trainedModel->ModelType () == Model::ModelTypes::OldSVM)
102  {
103  trainedModelOldSVM = dynamic_cast<ModelOldSVMPtr> (trainedModel);
104  if (trainedModelOldSVM)
105  trainedModelSVMModel = trainedModelOldSVM->SvmModel ();
106  }
107 
108  BuildSubClassifierIndex ();
109 }
110 
111 
112 
114 {
115  delete mlClasses; mlClasses = NULL;
116  delete subClassifiers; subClassifiers = NULL;
117 }
118 
119 
121 {
122  kkint32 memoryConsumedEstimated = sizeof (*this);
123  if (mlClasses) memoryConsumedEstimated += mlClasses->MemoryConsumedEstimated ();
124  return memoryConsumedEstimated;
125 } /* MemoryConsumedEstimated */
126 
127 
128 
130 {
131  if (!trainedModelOldSVM)
133  else
134  return trainedModelOldSVM->SelectionMethod ();
135 }
136 
137 
138 
140 {
141  if (!trainingProcess)
142  return NULL;
143 
144  return trainingProcess->PriorProbability ();
145 } /* PriorProbability */
146 
147 
148 
149 
150 
151 
152 
153 MLClassPtr Classifier2::ClassifyAImageOneLevel (FeatureVector& example,
154  double& probability,
155  kkint32& numOfWinners,
156  bool& knownClassOneOfTheWinners,
157  double& breakTie
158  )
159 
160 {
161  probability = 0.0;
162 
163  double probOfKnownClass = 0.0f;
164  probability = 0.0;
165 
166  MLClassPtr origClass = example.MLClass ();
167  MLClassPtr predictedClass = NULL;
168  MLClassPtr predictedClass2 = NULL;
169 
170  kkint32 class1Votes = -1;
171  kkint32 class2Votes = -1;
172 
173  double predictedClass2Prob = 0.0f;
174 
175  trainedModel->Predict (&example,
176  origClass,
177  predictedClass,
178  predictedClass2,
179  class1Votes,
180  class2Votes,
181  probOfKnownClass,
182  probability,
183  predictedClass2Prob,
184  numOfWinners,
185  knownClassOneOfTheWinners,
186  breakTie,
187  log
188  );
189 
190  if (predictedClass == NULL)
191  {
192  log.Level (-1) << endl << endl
193  << "Classifier2::ClassifyAImageOneLevel The trainedModel returned back a NULL pointer for predicted class" << endl
194  << endl;
195  predictedClass = unKnownMLClass;
196  }
197 
198 
199  if (subClassifiers)
200  {
201  Classifier2Ptr subClassifer = LookUpSubClassifietByClass (predictedClass);
202  if (subClassifer)
203  {
204  double subProbability = 0.0;
205  kkint32 subNumOfWinners = 0;
206  double subBreakTie = 0.0;
207  /**@todo make sure that the following call does not normalize the features. */
208  MLClassPtr subPrediction
209  = subClassifer->ClassifyAImageOneLevel (example, subProbability, subNumOfWinners, knownClassOneOfTheWinners, subBreakTie);
210  if (subPrediction)
211  {
212  probability = probability * subProbability;
213  numOfWinners = numOfWinners + subNumOfWinners;
214  breakTie += subBreakTie * (1.0 - breakTie);
215  }
216  }
217  }
218 
219  if (predictedClass->UnDefined ())
220  predictedClass = noiseMLClass;
221 
222  example.MLClass (predictedClass);
223 
224  return predictedClass;
225 } /* ClassifyAImageOneLevel */
226 
227 
228 
229 
230 MLClassPtr Classifier2::ClassifyAImageOneLevel (FeatureVector& example)
231 {
232  double probability = 0.0;
233  bool knownClassOneOfTheWinners = false;
234  kkint32 numOfWinners = 0;
235  double breakTie = 0.0;
236 
237  return ClassifyAImageOneLevel (example,
238  probability,
239  numOfWinners,
240  knownClassOneOfTheWinners,
241  breakTie
242  );
243 } /* ClassifyAImageOneLevel */
244 
245 
246 
247 
248 MLClassPtr Classifier2::ClassifyAImageOneLevel (FeatureVector& example,
249  kkint32& numOfWinners,
250  bool& knownClassOneOfTheWinners
251  )
252 {
253  double probability = 0.0;
254  double breakTie = 0.0;
255 
256  return ClassifyAImageOneLevel (example,
257  probability,
258  numOfWinners,
259  knownClassOneOfTheWinners,
260  breakTie
261  );
262 } /* ClassifyAImageOneLevel */
263 
264 
265 
266 
267 
268 
269 
271  MLClassPtr& predClass1,
272  MLClassPtr& predClass2,
273  kkint32& predClass1Votes,
274  kkint32& predClass2Votes,
275  double& knownClassProb,
276  double& predClass1Prob,
277  double& predClass2Prob,
278  kkint32& numOfWinners,
279  double& breakTie
280  )
281 {
282  bool knownClassOneOfTheWiners = false;
283 
284  predClass1 = NULL;
285  predClass2 = NULL;
286  knownClassProb = -1.0f;
287  predClass1Prob = -1.0f;
288  predClass2Prob = -1.0f;
289 
290  MLClassPtr origClass = example.MLClass ();
291 
292 
293  trainedModel->Predict (&example,
294  origClass,
295  predClass1,
296  predClass2,
297  predClass1Votes,
298  predClass2Votes,
299  knownClassProb,
300  predClass1Prob,
301  predClass2Prob,
302  numOfWinners,
303  knownClassOneOfTheWiners,
304  breakTie,
305  log
306  );
307 
308  if (!predClass1)
309  predClass1 = noiseMLClass;
310 
311 
312  if (subClassifiers)
313  {
314  Classifier2Ptr subClassifer = LookUpSubClassifietByClass (predClass1);
315  if (subClassifer)
316  {
317  MLClassPtr subPredClass1 = NULL;
318  MLClassPtr subPredClass2 = NULL;
319  kkint32 subPredClass1Votes = 0;
320  kkint32 subPredClass2Votes = 0;
321  double subKnownClassProb = 0.0;
322  double subPredClass1Prob = 0.0;
323  double subPredClass2Prob = 0.0;
324  kkint32 subNumOfWinners = 0;
325  double subBreakTie = 0.0;
326 
327  subClassifer->ClassifyAExample (example, subPredClass1, subPredClass2,
328  subPredClass1Votes, subPredClass2Votes, subKnownClassProb,
329  subPredClass1Prob, subPredClass2Prob, subNumOfWinners,
330  subBreakTie
331  );
332  predClass1 = subPredClass1;
333  predClass1Votes += subPredClass1Votes;
334  predClass1Prob *= subPredClass1Prob;
335  knownClassProb *= subKnownClassProb;
336  numOfWinners += subNumOfWinners;
337  breakTie += subBreakTie * (1.0 - breakTie);
338  }
339 
340  subClassifer = LookUpSubClassifietByClass (predClass2);
341  if (subClassifer)
342  {
343  MLClassPtr subPredClass1 = NULL;
344  MLClassPtr subPredClass2 = NULL;
345  kkint32 subPredClass1Votes = 0;
346  kkint32 subPredClass2Votes = 0;
347  double subKnownClassProb = 0.0;
348  double subPredClass1Prob = 0.0;
349  double subPredClass2Prob = 0.0;
350  kkint32 subNumOfWinners = 0;
351  double subBreakTie = 0.0;
352 
353  subClassifer->ClassifyAExample (example, subPredClass1, subPredClass2,
354  subPredClass1Votes, subPredClass2Votes, subKnownClassProb,
355  subPredClass1Prob, subPredClass2Prob, subNumOfWinners,
356  subBreakTie
357  );
358  predClass2 = subPredClass1;
359  predClass2Votes += subPredClass1Votes;
360  predClass2Prob *= subPredClass1Prob;
361  }
362  }
363 
364  example.MLClass (predClass1);
365 
366  return;
367 } /* ClassifyAExample */
368 
369 
370 
371 
372 
374  double& probability,
375  kkint32& numOfWinners,
376  bool& knownClassOneOfTheWinners,
377  double& breakTie
378  )
379 {
380  MLClassPtr predictedClass = NULL;
381 
382  probability = 0.0;
383 
384  predictedClass = ClassifyAImageOneLevel (example,
385  probability,
386  numOfWinners,
387  knownClassOneOfTheWinners,
388  breakTie
389  );
390  return predictedClass;
391 } /* ClassifyAExample */
392 
393 
394 
395 
397  kkint32& numOfWinners,
398  bool& knownClassOneOfTheWinners
399  )
400 {
401  MLClassPtr predictedClass = NULL;
402 
403  // Lets first Normalize Feature Data.
404 
405  predictedClass = ClassifyAImageOneLevel (example, numOfWinners, knownClassOneOfTheWinners);
406 
407  return predictedClass;
408 } /* ClassifyAExample */
409 
410 
411 
412 
414 {
415  kkint32 numOfWinners = 0;
416  bool knownClassOneOfTheWinners = false;
417  return ClassifyAExample (example, numOfWinners, knownClassOneOfTheWinners);
418 }
419 
420 
421 
422 
424  MLClassPtr c2
425  )
426 {
427  if (!trainedModelSVMModel)
428  {
429  vector<KKStr> results;
430  return results;
431  }
432  else
433  {
434  return trainedModelSVMModel->SupportVectorNames (c1, c2);
435  }
436 }
437 
438 
439 
440 
441 
444  MLClassPtr c1,
445  MLClassPtr c2
446  )
447 {
448  if (!trainedModelSVMModel)
449  {
450  vector<ProbNamePair> results;
451  return results;
452  }
453 
454  return trainedModelSVMModel->FindWorstSupportVectors (example, numToFind, c1, c2);
455 }
456 
457 
458 
459 
462  MLClassPtr c1,
463  MLClassPtr c2
464  )
465 {
466  if (!trainedModelSVMModel)
467  {
468  vector<ProbNamePair> results;
469  return results;
470  }
471 
472  return trainedModelSVMModel->FindWorstSupportVectors2 (example, numToFind, c1, c2);
473 }
474 
475 
476 
477 
479  MLClassPtr & predClass,
480  double& dist
481  )
482 {
483  trainedModel->PredictRaw (example, predClass, dist);
484  if (subClassifiers)
485  {
486  Classifier2Ptr subClassifer = LookUpSubClassifietByClass (predClass);
487  if (subClassifer)
488  subClassifer->PredictRaw (example, predClass, dist);
489  }
490 } /* PredictRaw */
491 
492 
493 
494 
496  FeatureVectorPtr example,
497  kkint32* votes,
498  double* probabilities
499  )
500 {
501  ClassProbListPtr predictions = ProbabilitiesByClass (example);
502 
503  kkuint32 numClasses = classes.size ();
504  for (kkuint32 x = 0; x < numClasses; ++x)
505  {
506  votes[x] = 0;
507  probabilities[x] = 0.0;
508 
509  MLClassPtr c = classes.IdxToPtr (x);
510  ClassProbPtr cp = predictions->LookUp (c);
511  if (cp)
512  {
513  votes[x] = (kkint32)(0.5f + cp->votes);
514  probabilities[x] = cp->probability;
515  }
516  }
517  delete predictions;
518  predictions= NULL;
519 } /* ProbabilitiesByClass */
520 
521 
522 
523 
524 MLClassListPtr Classifier2::PredictionsThatHaveSubClassifier (ClassProbListPtr predictions)
525 {
526  MLClassListPtr classes = new MLClassList ();
527  ClassProbList::iterator idx;
528  for (idx = predictions->begin (); idx != predictions->end (); ++idx)
529  {
530  ClassProbPtr cp = *idx;
531  Classifier2Ptr subClassifier = LookUpSubClassifietByClass (cp->classLabel);
532  if (subClassifier)
533  classes->PushOnBack (cp->classLabel);
534  }
535 
536  return classes;
537 } /* PredictionsThatHaveSubClassifier */
538 
539 
540 
541 
542 
543 
544 ClassProbListPtr Classifier2::GetListOfPredictionsForClassifier (Classifier2Ptr classifier,
545  ClassProbListPtr predictions
546  )
547 {
548  ClassProbListPtr subPredictions = new ClassProbList (false);
549  ClassifierClassIndexType::iterator idx;
550  idx = classifierClassIndex.find (classifier);
551  while (idx != classifierClassIndex.end ())
552  {
553  if (idx->first != classifier)
554  break;
555 
556  ClassProbPtr cp = predictions->LookUp (idx->second);
557  if (cp)
558  subPredictions->PushOnBack (cp);
559  }
560  return subPredictions;
561 } /* GetListOfPredictionsForClassifier */
562 
563 
564 
565 
/**
 *@param[in] upperLevelPredictions  Caller retains ownership. When this classifier has no
 * sub-classifiers the SAME pointer is returned; otherwise a newly allocated consolidated
 * list is returned and the input is left untouched. Callers must take care not to delete
 * the input and the return value separately when they alias.
 */
ClassProbListPtr Classifier2::ProcessSubClassifersMethod1 (FeatureVectorPtr example,
                                                           ClassProbListPtr upperLevelPredictions
                                                          )
{
  // No sub-classifiers: hand the caller's own list straight back (no new allocation;
  // the return value aliases the input in this case).
  if (!subClassifiers)
    return upperLevelPredictions;

  ClassProbListPtr results = new ClassProbList ();

  Classifier2List::iterator idx;

  // Score 'example' against every sub-classifier and merge all of their per-class
  // predictions into 'results'.
  for (idx = subClassifiers->begin (); idx != subClassifiers->end (); ++idx)
  {
    Classifier2Ptr subClassifier = *idx;
    ClassProbListPtr subSetPredictions = subClassifier->ProbabilitiesByClass (example);

    ClassProbList::iterator idx2;
    for (idx2 = subSetPredictions->begin (); idx2 != subSetPredictions->end (); ++idx2)
    {
      ClassProbPtr cp = *idx2;
      // MergeIn folds cp's prediction into 'results'; presumably it copies the entry,
      // since 'subSetPredictions' is deleted just below -- TODO(review): confirm
      // MergeIn does not retain the 'cp' pointer itself.
      results->MergeIn (cp);
    }

    delete subSetPredictions;
    subSetPredictions = NULL;
  }

  {
    // Carry over any upper-level prediction whose class no sub-classifier reported on.
    ClassProbList::iterator idx2;
    for (idx2 = upperLevelPredictions->begin (); idx2 != upperLevelPredictions->end (); ++idx2)
    {
      ClassProbPtr oldPrediction = *idx2;
      ClassProbPtr alreadyInResults = results->LookUp (oldPrediction->classLabel);
      if (!alreadyInResults)
        results->MergeIn (oldPrediction);
    }
  }

  return results;
} /* ProcessSubClassifersMethod1 */
609 
610 
611 
612 
613 
/**
 *@param[in] upperLevelPredictions  Not modified and not owned by this method; the caller
 * retains ownership (ProbabilitiesByClass deletes it after this call). A newly allocated
 * result list is always returned.
 */
ClassProbListPtr Classifier2::ProcessSubClassifersMethod2 (FeatureVectorPtr example,
                                                           ClassProbListPtr upperLevelPredictions
                                                          )
{
  // No sub-classifiers: return a fresh copy so the caller may delete both lists safely.
  if (!subClassifiers)
  {
    ClassProbListPtr results = new ClassProbList (*upperLevelPredictions);
    return results;
  }

  ClassProbListPtr results = new ClassProbList ();

  ClassProbList::const_iterator idx1;
  for (idx1 = upperLevelPredictions->begin (); idx1 != upperLevelPredictions->end (); ++idx1)
  {
    ClassProbPtr ulp = *idx1;
    Classifier2Ptr subClassifier = LookUpSubClassifietByClass (ulp->classLabel);
    if (subClassifier == NULL)
    {
      // No refinement available for this class; keep the upper-level prediction as is.
      results->PushOnBack (new ClassProb (*ulp));
    }
    else
    {
      ClassProbListPtr subPredictions = subClassifier->ProbabilitiesByClass (example);
      if (subPredictions == NULL)
      {
        // Sub-classifier produced nothing; fall back to the upper-level prediction.
        results->PushOnBack (new ClassProb (*ulp));
      }
      else
      {
        // Distribute the upper-level probability over the sub-classifier's classes:
        // prob(subClass) = prob(upperClass) * prob(subClass given upperClass); votes sum.
        ClassProbList::const_iterator idx2;
        for (idx2 = subPredictions->begin (); idx2 != subPredictions->end (); ++idx2)
        {
          ClassProbPtr subPred = *idx2;
          double probability = ulp->probability * subPred->probability;
          float votes = ulp->votes + subPred->votes;
          results->PushOnBack (new ClassProb (subPred->classLabel, probability, votes));
        }
        delete subPredictions;
        subPredictions = NULL;
      }
    }

  }

  // The expanded probabilities no longer sum to 1; renormalize before returning.
  results->NormalizeToOne ();
  return results;
} /* ProcessSubClassifersMethod2 */
665 
666 
667 
668 
669 
670 
672 {
673  if (!trainedModel)
674  return NULL;
675 
676  ClassProbListPtr results = trainedModel->ProbabilitiesByClass (example, log);
677  if (!results)
678  return NULL;
679 
680  ClassProbListPtr expandedResults = ProcessSubClassifersMethod2 (example, results);
681  delete results;
682  results = NULL;
683 
684 
685  return expandedResults;
686 } /* ProbabilitiesByClass */
687 
688 
689 
690 
691 
693  double** crossProbTable // two dimension matrix that needs to be classes.QueueSize () squared.
694  )
695 {
696  if (trainedModel)
697  trainedModel->RetrieveCrossProbTable (classes, crossProbTable, log);
698  }
699 
700 
701 
702 
703 
705  KKStr& classifier1Desc,
706  KKStr& classifier2Desc,
707  ClassProbListPtr& classifier1Results,
708  ClassProbListPtr& classifier2Results
709  )
710 {
711  if (trainedModel)
712  trainedModel->ProbabilitiesByClassDual (example, classifier1Desc, classifier2Desc, classifier1Results, classifier2Results, log);
713 }
714 
715 
716 
717 
718 
719 void Classifier2::BuildSubClassifierIndex ()
720 {
721  delete subClassifiers;
722  subClassifiers = NULL;
723  classClassifierIndex.clear ();
724  classifierClassIndex.clear ();
725 
726  if (trainingProcess == NULL)
727  return;
728 
729  if (trainingProcess->SubTrainingProcesses () == NULL)
730  return;
731 
732  if (trainingProcess->Config () == NULL)
733  return;
734 
735  subClassifiers = new Classifier2List (true);
736  {
737  TrainingProcess2ListPtr subProcessors = trainingProcess->SubTrainingProcesses ();
738  TrainingProcess2List::const_iterator idx;
739  for (idx = subProcessors->begin (); idx != subProcessors->end (); ++idx)
740  {
741  TrainingProcess2Ptr tp = *idx;
742  Classifier2Ptr subClassifier = new Classifier2 (tp, log);
743  subClassifiers->PushOnBack (subClassifier);
744  }
745  }
746 
747  {
748  TrainingConfiguration2Const* config = trainingProcess->Config ();
749 
750  const TrainingClassList& trainClasses = config->TrainingClasses ();
751  TrainingClassList::const_iterator idx;
752  for (idx = trainClasses.begin (); idx != trainClasses.end (); ++idx)
753  {
754  TrainingClassPtr tcp = *idx;
755  if (tcp->SubClassifier () != NULL)
756  {
757  Classifier2Ptr subClassifier = subClassifiers->LookUpByName (tcp->SubClassifier ()->ConfigRootName ());
758  if (subClassifier)
759  {
760  ClassClassifierIndexType::const_iterator idx;
761  idx = classClassifierIndex.find (tcp->MLClass ());
762  if (idx == classClassifierIndex.end ())
763  classClassifierIndex.insert (ClassClassifierPair (tcp->MLClass (), subClassifier));
764  classifierClassIndex.insert (ClassifierClassPair (subClassifier, tcp->MLClass ()));
765  }
766  }
767  }
768  }
769 } /* BuildSubClassifierIndex */
770 
771 
772 
773 
774 Classifier2Ptr Classifier2::LookUpSubClassifietByClass (MLClassPtr c)
775 {
776  ClassClassifierIndexType::const_iterator idx;
777  idx = classClassifierIndex.find (c);
778  if (idx == classClassifierIndex.end ())
779  return NULL;
780  else
781  return idx->second;
782 } /* LookUpSubClassifietByClass */
783 
784 
785 
786 
787 
788 
791 {
792 }
793 
794 
795 
797 {
798 }
799 
800 
801 
803 {
804  Classifier2List::const_iterator idx;
805  for (idx = begin (); idx != end (); ++idx)
806  {
807  Classifier2Ptr c = *idx;
809  return c;
810  }
811  return NULL;
812 } /* LookUpByName */
MLClassPtr ClassifyAExample(FeatureVector &example, kkint32 &numOfWinners, bool &knownClassOneOfTheWinners)
Base class to all Learning Algorithms.
Definition: Model.h:82
virtual void Predict(FeatureVectorPtr example, MLClassPtr knownClass, MLClassPtr &predClass1, MLClassPtr &predClass2, kkint32 &predClass1Votes, kkint32 &predClass2Votes, double &probOfKnownClass, double &predClass1Prob, double &predClass2Prob, kkint32 &numOfWinners, bool &knownClassOneOfTheWinners, double &breakTie, RunLog &log)=0
SVM_SelectionMethod SelectionMethod() const
MLClassPtr GetUnKnownClass()
Return a pointer to the MLClass object that represents the unknown Class in the list.
Definition: MLClass.cpp:893
SVM_SelectionMethod
Definition: SVMparam.h:34
__int32 kkint32
Definition: KKBaseTypes.h:88
FeatureVector * FeatureVectorPtr
Definition: Model.h:44
const KKStr & ConfigRootName() const
const KKStr & ConfigRootName() const
Definition: Classifier2.h:87
MLClassPtr GetNoiseClass() const
Definition: MLClass.cpp:875
SVMModelPtr SvmModel() const
Definition: ModelOldSVM.h:92
Classifier2List(bool _owner)
void ProbabilitiesByClassDual(FeatureVectorPtr example, KKStr &classifier1Desc, KKStr &classifier2Desc, ClassProbListPtr &classifier1Results, ClassProbListPtr &classifier2Results)
void MergeIn(const ClassProbPtr cp)
Adds the Prediction in 'cp' into this list.
Definition: ClassProb.cpp:268
virtual void PushOnBack(ClassProbPtr cp)
Definition: ClassProb.cpp:212
ModelPtr TrainedModel() const
TrainingConfiguration2Const * Config()
ClassProbList const * PriorProbability() const
bool UnDefined() const
Definition: MLClass.h:183
kkint32 MemoryConsumedEstimated() const
Definition: MLClass.cpp:616
std::vector< ProbNamePair > FindWorstSupportVectors(FeatureVectorPtr example, kkint32 numToFind, MLClassPtr c1, MLClassPtr c2)
For a given two class pair return the names of the 'numToFind' worst S/V's.
Classifier2Ptr LookUpByName(const KKStr &rootName) const
virtual void PredictRaw(FeatureVectorPtr example, MLClassPtr &predClass, double &dist)
Definition: Model.h:230
bool EqualIgnoreCase(const KKStr &s2) const
Definition: KKStr.cpp:1250
unsigned __int32 kkuint32
Definition: KKBaseTypes.h:89
MLClassListPtr MLClasses() const
virtual kkint32 MemoryConsumedEstimated() const
KKTHread * KKTHreadPtr
ClassProbList const * PriorProbability() const
Returns the distribution of the training data used to build the classifier.
const ClassProbPtr LookUp(MLClassPtr targetClass) const
Definition: ClassProb.cpp:160
std::vector< KKStr > SupportVectorNames(MLClassPtr c1, MLClassPtr c2)
void PredictRaw(FeatureVectorPtr example, MLClassPtr &predClass, double &dist)
void ClassifyAExample(FeatureVector &example, MLClassPtr &predClass1, MLClassPtr &predClass2, kkint32 &predClass1Votes, kkint32 &predClass2Votes, double &knownClassProb, double &predClass1Prob, double &predClass2Prob, kkint32 &numOfWinners, double &breakTie)
Used to record probability for a specified class; and a list of classes.
Definition: ClassProb.h:25
virtual void PushOnBack(MLClassPtr mlClass)
Definition: MLClass.cpp:798
virtual ~Classifier2()
ClassProbList(bool owner)
Definition: ClassProb.cpp:48
static KKStr Concat(const std::vector< std::string > &values)
Concatenates the list of 'std::string' strings.
Definition: KKStr.cpp:1082
ClassProbList * ClassProbListPtr
Definition: Classifier2.h:30
double probability
Definition: ClassProb.h:36
Classifier2(TrainingProcess2Ptr _trainer, RunLog &_log)
Definition: Classifier2.cpp:42
virtual void RetrieveCrossProbTable(MLClassList &classes, double **crossProbTable, RunLog &log)
Definition: Model.cpp:755
ClassProbList(const ClassProbList &pairList)
Definition: ClassProb.cpp:55
MLClassPtr MLClass() const
Class that is example is assigned to.
void MLClass(MLClassPtr _mlClass)
Assign a class to this example.
Definition: FeatureVector.h:74
Classifier2 * Classifier2Ptr
Definition: Classifier2.h:78
const TrainingClassList & TrainingClasses() const
SVM_SelectionMethod SelectionMethod() const
TrainingProcess2 * TrainingProcess2Ptr
Definition: Classifier2.h:62
virtual void ProbabilitiesByClassDual(FeatureVectorPtr example, KKStr &classifier1Desc, KKStr &classifier2Desc, ClassProbListPtr &classifier1Results, ClassProbListPtr &classifier2Results, RunLog &log)
Only applied to ModelDual classifier.
Definition: Model.cpp:830
bool ValidModel() const
Definition: Model.h:195
ClassProb(const ClassProb &_pair)
Definition: ClassProb.cpp:33
TrainingConfiguration2Ptr SubClassifier() const
Definition: TrainingClass.h:72
virtual ClassProbListPtr ProbabilitiesByClass(FeatureVectorPtr example, RunLog &log)=0
virtual ModelTypes ModelType() const =0
MLClassList(const MLClassList &_mlClasses)
Copy constructor; will copy list but not own the contents.
Definition: MLClass.cpp:570
bool FeaturesAlreadyNormalized() const
MLClassPtr ClassifyAExample(FeatureVector &example, double &probability, kkint32 &numOfWinners, bool &knownClassOneOfTheWinners, double &breakTie)
KKException(const char *_exceptionStr)
Definition: KKException.cpp:38
void NormalizeToOne()
Will normalize the list of predictions such that the total probability will equal 1...
Definition: ClassProb.cpp:336
KKStr & operator=(const KKStr &src)
Definition: KKStr.cpp:1390
std::vector< ProbNamePair > FindWorstSupportVectors2(FeatureVectorPtr example, kkint32 numToFind, MLClassPtr c1, MLClassPtr c2)
For a given two class pair return the names of the 'numToFind' worst S/V's.
void ProbabilitiesByClass(const MLClassList &classes, FeatureVectorPtr example, kkint32 *votes, double *probabilities)
For a given feature vector return back the probabilities and votes for each class.
Used for logging messages.
Definition: RunLog.h:49
void EncodeProblem(const struct svm_paramater &param, struct svm_problem &prob_in, struct svm_problem &prob_out)
ClassProbListPtr ProbabilitiesByClass(FeatureVectorPtr example)
void RetrieveCrossProbTable(MLClassList &classes, double **crossProbTable)
TrainingProcess2ListPtr SubTrainingProcesses() const
KKException(const KKStr &_exceptionStr)
Definition: KKException.cpp:45
ClassProb(MLClassPtr _classLabel, double _probability, float _votes)
Definition: ClassProb.cpp:22
Maintains a list of MLClass instances.
Definition: MLClass.h:233
Represents a Feature Vector of a single example, labeled or unlabeled.
Definition: FeatureVector.h:59
MLClassPtr classLabel
Definition: ClassProb.h:35
TrainingClass * TrainingClassPtr
Classifier2 * Classifier2Ptr
Definition: Classifier2.h:284
TrainingProcess2List * TrainingProcess2ListPtr
Definition: Classifier2.h:68
MLClassPtr ClassifyAExample(FeatureVector &example)
ClassProb * ClassProbPtr
Definition: Classifier2.h:28