38 using namespace KKMLL;
47 classifierClassIndex (),
48 classClassifierIndex (),
49 featuresAlreadyNormalized (
false),
52 subClassifiers (NULL),
55 trainedModelOldSVM (NULL),
56 trainedModelSVMModel (NULL),
58 trainingProcess (_trainer),
63 log.Level (-1) << endl
64 <<
"Classifier2::Classifier2 ***ERROR*** (_trainer == NULL)" << endl
66 throw KKException ("Classifier2::Classifier2 ***ERROR*** (_trainer == NULL)");
71 log.Level (-1) << endl
72 <<
"Classifier2::Classifier2 ***ERROR*** '_trainer' is invalid." << endl
74 throw KKException ("Classifier2::Classifier2 ***ERROR*** '_trainer' is invalid.");
83 if (trainedModel == NULL)
85 KKStr errMsg =
"Classifier2::Classifier2 ***ERROR*** (trainedModel == NULL).";
86 log.Level (-1) << endl << endl << errMsg << endl << endl;
92 KKStr errMsg =
"Classifier2::Classifier2 ***ERROR*** trainedModel is not valid.";
93 log.Level (-1) << endl << endl << errMsg << endl << endl;
97 log.Level (20) <<
"Classifier2::Classifier2" << endl;
103 trainedModelOldSVM =
dynamic_cast<ModelOldSVMPtr> (trainedModel);
104 if (trainedModelOldSVM)
108 BuildSubClassifierIndex ();
115 delete mlClasses; mlClasses = NULL;
116 delete subClassifiers; subClassifiers = NULL;
122 kkint32 memoryConsumedEstimated =
sizeof (*
this);
124 return memoryConsumedEstimated;
131 if (!trainedModelOldSVM)
141 if (!trainingProcess)
156 bool& knownClassOneOfTheWinners,
163 double probOfKnownClass = 0.0f;
167 MLClassPtr predictedClass = NULL;
168 MLClassPtr predictedClass2 = NULL;
173 double predictedClass2Prob = 0.0f;
185 knownClassOneOfTheWinners
, 190 if (predictedClass == NULL)
192 log.Level (-1) << endl << endl
193 <<
"Classifier2::ClassifyAImageOneLevel The trainedModel returned back a NULL pointer for predicted class" << endl
195 predictedClass = unKnownMLClass;
201 Classifier2Ptr subClassifer = LookUpSubClassifietByClass (predictedClass);
204 double subProbability = 0.0;
206 double subBreakTie = 0.0;
208 MLClassPtr subPrediction
209 = subClassifer->ClassifyAImageOneLevel (example, subProbability, subNumOfWinners, knownClassOneOfTheWinners, subBreakTie);
212 probability = probability * subProbability;
213 numOfWinners = numOfWinners + subNumOfWinners;
214 breakTie += subBreakTie * (1.0 - breakTie);
220 predictedClass = noiseMLClass;
224 return predictedClass;
232 double probability = 0.0;
233 bool knownClassOneOfTheWinners =
false;
235 double breakTie = 0.0;
237 return ClassifyAImageOneLevel (example,
240 knownClassOneOfTheWinners,
250 bool& knownClassOneOfTheWinners
253 double probability = 0.0;
254 double breakTie = 0.0;
256 return ClassifyAImageOneLevel (example,
259 knownClassOneOfTheWinners,
271 MLClassPtr& predClass1,
272 MLClassPtr& predClass2,
275 double& knownClassProb,
276 double& predClass1Prob,
277 double& predClass2Prob,
282 bool knownClassOneOfTheWiners =
false;
286 knownClassProb = -1.0f;
287 predClass1Prob = -1.0f;
288 predClass2Prob = -1.0f;
303 knownClassOneOfTheWiners
, 309 predClass1 = noiseMLClass;
314 Classifier2Ptr subClassifer = LookUpSubClassifietByClass (predClass1);
317 MLClassPtr subPredClass1 = NULL;
318 MLClassPtr subPredClass2 = NULL;
319 kkint32 subPredClass1Votes = 0;
320 kkint32 subPredClass2Votes = 0;
321 double subKnownClassProb = 0.0;
322 double subPredClass1Prob = 0.0;
323 double subPredClass2Prob = 0.0;
325 double subBreakTie = 0.0;
328 subPredClass1Votes
, subPredClass2Votes
, subKnownClassProb
, 329 subPredClass1Prob
, subPredClass2Prob
, subNumOfWinners
, 332 predClass1 = subPredClass1;
333 predClass1Votes += subPredClass1Votes;
334 predClass1Prob *= subPredClass1Prob;
335 knownClassProb *= subKnownClassProb;
336 numOfWinners += subNumOfWinners;
337 breakTie += subBreakTie * (1.0 - breakTie);
340 subClassifer = LookUpSubClassifietByClass (predClass2);
343 MLClassPtr subPredClass1 = NULL;
344 MLClassPtr subPredClass2 = NULL;
345 kkint32 subPredClass1Votes = 0;
346 kkint32 subPredClass2Votes = 0;
347 double subKnownClassProb = 0.0;
348 double subPredClass1Prob = 0.0;
349 double subPredClass2Prob = 0.0;
351 double subBreakTie = 0.0;
354 subPredClass1Votes
, subPredClass2Votes
, subKnownClassProb
, 355 subPredClass1Prob
, subPredClass2Prob
, subNumOfWinners
, 358 predClass2 = subPredClass1;
359 predClass2Votes += subPredClass1Votes;
360 predClass2Prob *= subPredClass1Prob;
376 bool& knownClassOneOfTheWinners,
380 MLClassPtr predictedClass = NULL;
384 predictedClass = ClassifyAImageOneLevel (example,
387 knownClassOneOfTheWinners,
390 return predictedClass;
398 bool& knownClassOneOfTheWinners
401 MLClassPtr predictedClass = NULL;
405 predictedClass = ClassifyAImageOneLevel (example, numOfWinners, knownClassOneOfTheWinners);
407 return predictedClass;
416 bool knownClassOneOfTheWinners =
false;
427 if (!trainedModelSVMModel)
429 vector<KKStr> results;
434 return trainedModelSVMModel->SupportVectorNames (c1, c2);
448 if (!trainedModelSVMModel)
450 vector<ProbNamePair> results;
454 return trainedModelSVMModel->FindWorstSupportVectors (example, numToFind, c1, c2);
466 if (!trainedModelSVMModel)
468 vector<ProbNamePair> results;
472 return trainedModelSVMModel->FindWorstSupportVectors2 (example, numToFind, c1, c2);
479 MLClassPtr & predClass,
486 Classifier2Ptr subClassifer = LookUpSubClassifietByClass (predClass);
498 double* probabilities
503 kkuint32 numClasses = classes.size ();
504 for (
kkuint32 x = 0; x < numClasses; ++x)
507 probabilities[x] = 0.0;
509 MLClassPtr c = classes.IdxToPtr (x);
527 ClassProbList::iterator idx;
528 for (idx = predictions->begin (); idx != predictions->end (); ++idx)
549 ClassifierClassIndexType::iterator idx;
550 idx = classifierClassIndex.find (classifier);
551 while (idx != classifierClassIndex.end ())
553 if (idx->first != classifier)
556 ClassProbPtr cp = predictions->LookUp (idx->second);
558 subPredictions->PushOnBack (cp);
560 return subPredictions;
574 return upperLevelPredictions;
578 Classifier2List::iterator idx;
580 for (idx = subClassifiers->begin (); idx != subClassifiers->end (); ++idx)
585 ClassProbList::iterator idx2;
586 for (idx2 = subSetPredictions->begin (); idx2 != subSetPredictions->end (); ++idx2)
592 delete subSetPredictions;
593 subSetPredictions = NULL;
597 ClassProbList::iterator idx2;
598 for (idx2 = upperLevelPredictions->begin (); idx2 != upperLevelPredictions->end (); ++idx2)
602 if (!alreadyInResults)
629 ClassProbList::const_iterator idx1;
630 for (idx1 = upperLevelPredictions->begin (); idx1 != upperLevelPredictions->end (); ++idx1)
634 if (subClassifier == NULL)
641 if (subPredictions == NULL)
647 ClassProbList::const_iterator idx2;
648 for (idx2 = subPredictions->begin (); idx2 != subPredictions->end (); ++idx2)
655 delete subPredictions;
656 subPredictions = NULL;
680 ClassProbListPtr expandedResults = ProcessSubClassifersMethod2 (example, results);
685 return expandedResults;
693 double** crossProbTable
705 KKStr& classifier1Desc,
706 KKStr& classifier2Desc,
721 delete subClassifiers;
722 subClassifiers = NULL;
723 classClassifierIndex.clear ();
724 classifierClassIndex.clear ();
726 if (trainingProcess == NULL)
738 TrainingProcess2List::const_iterator idx;
739 for (idx = subProcessors->begin (); idx != subProcessors->end (); ++idx)
743 subClassifiers->PushOnBack (subClassifier);
748 TrainingConfiguration2Const* config = trainingProcess
->Config ();
751 TrainingClassList::const_iterator idx;
752 for (idx = trainClasses.begin (); idx != trainClasses.end (); ++idx)
760 ClassClassifierIndexType::const_iterator idx;
761 idx = classClassifierIndex.find (tcp->MLClass ());
762 if (idx == classClassifierIndex.end ())
763 classClassifierIndex.insert (ClassClassifierPair (tcp->MLClass (), subClassifier));
764 classifierClassIndex.insert (ClassifierClassPair (subClassifier, tcp->MLClass ()));
776 ClassClassifierIndexType::const_iterator idx;
777 idx = classClassifierIndex.find (c);
778 if (idx == classClassifierIndex.end ())
804 Classifier2List::const_iterator idx;
805 for (idx = begin (); idx != end (); ++idx)
MLClassPtr ClassifyAExample(FeatureVector &example, kkint32 &numOfWinners, bool &knownClassOneOfTheWinners)
Base class for all Learning Algorithms.
virtual ~Classifier2List()
virtual void Predict(FeatureVectorPtr example, MLClassPtr knownClass, MLClassPtr &predClass1, MLClassPtr &predClass2, kkint32 &predClass1Votes, kkint32 &predClass2Votes, double &probOfKnownClass, double &predClass1Prob, double &predClass2Prob, kkint32 &numOfWinners, bool &knownClassOneOfTheWinners, double &breakTie, RunLog &log)=0
SVM_SelectionMethod SelectionMethod() const
MLClassPtr GetUnKnownClass()
Return a pointer to the MLClass object that represents the unknown Class in the list.
FeatureVector * FeatureVectorPtr
const KKStr & ConfigRootName() const
const KKStr & ConfigRootName() const
MLClassPtr GetNoiseClass() const
SVMModelPtr SvmModel() const
Classifier2List(bool _owner)
void ProbabilitiesByClassDual(FeatureVectorPtr example, KKStr &classifier1Desc, KKStr &classifier2Desc, ClassProbListPtr &classifier1Results, ClassProbListPtr &classifier2Results)
void MergeIn(const ClassProbPtr cp)
Adds the Prediction in 'cp' into this list.
virtual void PushOnBack(ClassProbPtr cp)
ModelPtr TrainedModel() const
TrainingConfiguration2Const * Config()
ClassProbList const * PriorProbability() const
kkint32 MemoryConsumedEstimated() const
std::vector< ProbNamePair > FindWorstSupportVectors(FeatureVectorPtr example, kkint32 numToFind, MLClassPtr c1, MLClassPtr c2)
For a given two class pair return the names of the 'numToFind' worst S/V's.
Classifier2Ptr LookUpByName(const KKStr &rootName) const
virtual void PredictRaw(FeatureVectorPtr example, MLClassPtr &predClass, double &dist)
bool EqualIgnoreCase(const KKStr &s2) const
unsigned __int32 kkuint32
MLClassListPtr MLClasses() const
virtual kkint32 MemoryConsumedEstimated() const
ClassProbList const * PriorProbability() const
Returns the distribution of the training data used to build the classifier.
const ClassProbPtr LookUp(MLClassPtr targetClass) const
std::vector< KKStr > SupportVectorNames(MLClassPtr c1, MLClassPtr c2)
void PredictRaw(FeatureVectorPtr example, MLClassPtr &predClass, double &dist)
void ClassifyAExample(FeatureVector &example, MLClassPtr &predClass1, MLClassPtr &predClass2, kkint32 &predClass1Votes, kkint32 &predClass2Votes, double &knownClassProb, double &predClass1Prob, double &predClass2Prob, kkint32 &numOfWinners, double &breakTie)
Used to record probability for a specified class; and a list of classes.
virtual void PushOnBack(MLClassPtr mlClass)
ClassProbList(bool owner)
static KKStr Concat(const std::vector< std::string > &values)
Concatenates the list of 'std::string' strings.
ClassProbList * ClassProbListPtr
Classifier2(TrainingProcess2Ptr _trainer, RunLog &_log)
virtual void RetrieveCrossProbTable(MLClassList &classes, double **crossProbTable, RunLog &log)
ClassProbList(const ClassProbList &pairList)
MLClassPtr MLClass() const
Class that this example is assigned to.
void MLClass(MLClassPtr _mlClass)
Assign a class to this example.
Classifier2 * Classifier2Ptr
const TrainingClassList & TrainingClasses() const
SVM_SelectionMethod SelectionMethod() const
TrainingProcess2 * TrainingProcess2Ptr
virtual void ProbabilitiesByClassDual(FeatureVectorPtr example, KKStr &classifier1Desc, KKStr &classifier2Desc, ClassProbListPtr &classifier1Results, ClassProbListPtr &classifier2Results, RunLog &log)
Only applies to the ModelDual classifier.
ClassProb(const ClassProb &_pair)
TrainingConfiguration2Ptr SubClassifier() const
virtual ClassProbListPtr ProbabilitiesByClass(FeatureVectorPtr example, RunLog &log)=0
virtual ModelTypes ModelType() const =0
MLClassList(const MLClassList &_mlClasses)
Copy constructor; will copy list but not own the contents.
bool FeaturesAlreadyNormalized() const
MLClassPtr ClassifyAExample(FeatureVector &example, double &probability, kkint32 &numOfWinners, bool &knownClassOneOfTheWinners, double &breakTie)
KKException(const char *_exceptionStr)
void NormalizeToOne()
Will normalize the list of predictions such that the total probability will equal 1...
KKStr & operator=(const KKStr &src)
std::vector< ProbNamePair > FindWorstSupportVectors2(FeatureVectorPtr example, kkint32 numToFind, MLClassPtr c1, MLClassPtr c2)
For a given two class pair return the names of the 'numToFind' worst S/V's.
void ProbabilitiesByClass(const MLClassList &classes, FeatureVectorPtr example, kkint32 *votes, double *probabilities)
For a given feature vector return back the probabilities and votes for each class.
Used for logging messages.
void EncodeProblem(const struct svm_paramater ¶m, struct svm_problem &prob_in, struct svm_problem &prob_out)
ClassProbListPtr ProbabilitiesByClass(FeatureVectorPtr example)
void RetrieveCrossProbTable(MLClassList &classes, double **crossProbTable)
TrainingProcess2ListPtr SubTrainingProcesses() const
KKException(const KKStr &_exceptionStr)
ClassProb(MLClassPtr _classLabel, double _probability, float _votes)
Maintains a list of MLClass instances.
Represents a Feature Vector of a single example, labeled or unlabeled.
TrainingClass * TrainingClassPtr
Classifier2 * Classifier2Ptr
TrainingProcess2List * TrainingProcess2ListPtr
MLClassPtr ClassifyAExample(FeatureVector &example)