KSquare Utilities
SVM289_MFS Namespace Reference

Namespace used to wrap implementation of libSVM version 2.89. More...

Classes

class  Cache
 
struct  decision_function
 
class  Kernel
 
class  ONE_CLASS_Q
 
class  QMatrix
 
class  Solver
 
class  Solver_NU
 
class  SVC_Q
 
struct  Svm_Model
 
struct  svm_parameter
 
struct  svm_problem
 
class  SVR_Q
 

Typedefs

typedef float Qfloat
 
typedef signed char schar
 
typedef XmlElementTemplate< Svm_Model > XmlElementSvm_Model
 
typedef XmlElementSvm_Model * XmlElementSvm_ModelPtr
 

Enumerations

enum  Kernel_Type {
  Kernel_Type::Kernel_NULL, Kernel_Type::LINEAR, Kernel_Type::POLY, Kernel_Type::RBF,
  Kernel_Type::SIGMOID, Kernel_Type::PRECOMPUTED
}
 
enum  SVM_Type {
  SVM_Type::SVM_NULL, SVM_Type::C_SVC, SVM_Type::NU_SVC, SVM_Type::ONE_CLASS,
  SVM_Type::EPSILON_SVR, SVM_Type::NU_SVR
}
 

Functions

template<class S , class T >
void clone (T *&dst, S *src, kkint32 n)
 
template<class T >
T * GrowAllocation (T *src, kkint32 origSize, kkint32 newSize)
 
Kernel_Type Kernel_Type_FromStr (KKStr s)
 
KKStr Kernel_Type_ToStr (Kernel_Type kernelType)
 
void multiclass_probability (kkint32 numClasses, double **pairwiseProbs, double *classProb)
 
double powi (double base, kkint32 times)
 
double sigmoid_predict (double decision_value, double A, double B)
 
void sigmoid_train (kkint32 numExamples, const double *dec_values, const double *labels, double &A, double &B)
 
void solve_c_svc (const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, double Cp, double Cn, RunLog &_log)
 
void solve_epsilon_svr (const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, RunLog &_log)
 
void solve_nu_svc (const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, RunLog &_log)
 
void solve_one_class (const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, RunLog &_log)
 
const char * svm_check_parameter (const struct svm_problem *prob, const struct svm_parameter *param)
 
kkint32 svm_check_probability_model (const struct Svm_Model *model)
 
void svm_cross_validation (const svm_problem &prob, const svm_parameter &param, kkint32 nr_fold, double *target, RunLog &log)
 
void svm_destroy_model (struct Svm_Model *&model)
 
void svm_destroy_param (struct svm_parameter *&param)
 
void svm_get_labels (const struct Svm_Model *model, kkint32 *label)
 
kkint32 svm_get_nr_class (const struct Svm_Model *model)
 
kkint32 svm_get_svm_type (const struct Svm_Model *model)
 
double svm_get_svr_probability (const struct Svm_Model *model)
 
double svm_predict (const struct Svm_Model *model, const FeatureVector &x)
 
double svm_predict_probability (Svm_Model *model, const FeatureVector &x, double *prob_estimates, kkint32 *votes)
 
void svm_predict_values (const Svm_Model *model, const FeatureVector &x, double *dec_values)
 
Svm_Model * svm_train (const svm_problem &prob, const svm_parameter &param, RunLog &log)
 
decision_function svm_train_one (const svm_problem &prob, const svm_parameter &param, double Cp, double Cn, RunLog &_log)
 
SVM_Type SVM_Type_FromStr (KKStr s)
 
KKStr SVM_Type_ToStr (SVM_Type svmType)
 
template<class T >
void swap (T &x, T &y)
 

Variables

kkint32 libsvm_version
 
void(* svm_print_string )(const char *) = &print_string_stdout
 

Detailed Description

Namespace used to wrap implementation of libSVM version 2.89.

There is more than one version of libSVM implemented in the library. To prevent name conflicts between them each one was wrapped in their own namespace.
libSVM is a Support Vector Machine implementation done by "Chih-Chung Chang" and "Chih-Jen Lin". It was downloaded from http://www.csie.ntu.edu.tw/~cjlin/libsvm/. The source code was modified by Kurt Kramer. The primary changes to this implementation involves the replacement of the sparse data-structure in the original implementation with fixed length array implemented through the "FeatureVector" class and the ability to specify a sub-set of features to be utilized via the "FeatureNumList" class. This allows us to load in a single set of training data with all its features that can then be used for multiple Support Vector Machine instances where each instance utilizes a different set of features. The use of this version of libSVM(SVM289_MFS) is via the "ModelSvmBase" class.

Typedef Documentation

typedef float SVM289_MFS::Qfloat

Definition at line 287 of file svm2.h.

typedef signed char SVM289_MFS::schar

Definition at line 289 of file svm2.h.

Definition at line 230 of file svm2.h.

Definition at line 231 of file svm2.h.

Enumeration Type Documentation

Enumerator
Kernel_NULL 
LINEAR 
POLY 
RBF 
SIGMOID 
PRECOMPUTED 

Definition at line 83 of file svm2.h.

84  {
86  LINEAR,
87  POLY,
88  RBF,
89  SIGMOID,
91  };
#define LINEAR
Definition: UsfCasCor.h:221
#define SIGMOID
Definition: UsfCasCor.h:219
enum SVM289_MFS::SVM_Type
strong
Enumerator
SVM_NULL 
C_SVC 
NU_SVC 
ONE_CLASS 
EPSILON_SVR 
NU_SVR 

Definition at line 72 of file svm2.h.

Function Documentation

template<class S , class T >
void SVM289_MFS::clone ( T *&  dst,
S *  src,
kkint32  n 
)
inline

Definition at line 292 of file svm2.h.

293  {
294  dst = new T[n];
295 
296  kkint32 sizeOfT = sizeof(T);
297  KKStr::MemCpy ((void *)dst, (void *)src, sizeOfT * n);
298  }
__int32 kkint32
Definition: KKBaseTypes.h:88
template<class T >
T* SVM289_MFS::GrowAllocation ( T *  src,
kkint32  origSize,
kkint32  newSize 
)
inline

Definition at line 80 of file svm2.cpp.

84  {
85  kkint32 zed = 0;
86  T* dest = new T[newSize];
87  while (zed < origSize) {dest[zed] = src[zed]; zed++;}
88  while (zed < newSize) {dest[zed] = (T)0; zed++;}
89  delete src;
90  return dest;
91  } /* GrowAllocation */
__int32 kkint32
Definition: KKBaseTypes.h:88
Kernel_Type SVM289_MFS::Kernel_Type_FromStr ( KKStr  s)

Definition at line 564 of file svm2.cpp.

References KKB::KKStr::EqualIgnoreCase(), Kernel_NULL, LINEAR, KKB::KKStr::operator==(), POLY, PRECOMPUTED, RBF, SIGMOID, and KKB::KKStr::Upper().

Referenced by SVM289_MFS::svm_parameter::ParseTabDelStr(), SVM289_MFS::svm_parameter::ProcessSvmParameter(), and SVM289_MFS::Svm_Model::ReadXML().

565  {
566  s.Upper ();
567  if ((s.EqualIgnoreCase ("LINEAR")) || (s == "0")) return Kernel_Type::LINEAR;
568  if ((s.EqualIgnoreCase ("POLYNOMIAL")) || (s.EqualIgnoreCase ("POLY")) || (s == "1")) return Kernel_Type::POLY;
569  if ((s.EqualIgnoreCase ("RBF")) || (s == "2")) return Kernel_Type::RBF;
570  if ((s.EqualIgnoreCase ("SIGMOID")) || (s == "3")) return Kernel_Type::SIGMOID;
571  if ((s.EqualIgnoreCase ("PRECOMPUTED")) || (s == "4")) return Kernel_Type::PRECOMPUTED;
572 
574  }
bool EqualIgnoreCase(const KKStr &s2) const
Definition: KKStr.cpp:1250
#define LINEAR
Definition: UsfCasCor.h:221
void Upper()
Converts all characters in string to their Upper case equivalents via 'toupper'.
Definition: KKStr.cpp:2461
#define SIGMOID
Definition: UsfCasCor.h:219
KKStr SVM289_MFS::Kernel_Type_ToStr ( Kernel_Type  kernelType)

Definition at line 580 of file svm2.cpp.

References LINEAR, POLY, PRECOMPUTED, RBF, and SIGMOID.

Referenced by SVM289_MFS::svm_parameter::ToTabDelStr(), and SVM289_MFS::Svm_Model::WriteXML().

581 {
582  switch (kernelType)
583  {
584  case Kernel_Type::LINEAR: return "linear";
585  case Kernel_Type::POLY: return "polynomial";
586  case Kernel_Type::RBF: return "rbf";
587  case Kernel_Type::SIGMOID: return "sigmoid";
588  case Kernel_Type::PRECOMPUTED: return "precomputed";
589  }
590 
591  return "";
592 }
#define LINEAR
Definition: UsfCasCor.h:221
#define SIGMOID
Definition: UsfCasCor.h:219
void SVM289_MFS::multiclass_probability ( kkint32  numClasses,
double **  pairwiseProbs,
double *  classProb 
)

Implements method 2 from the multiclass_prob paper by Wu, Lin, and Weng

Parameters
[in]numClasses
[in]pairwiseProbs
[out]classProb
Parameters
numClassesNumber of Classes.
pairwiseProbsPair-wise Probabilities.
classProbClass Probability

Definition at line 3009 of file svm2.cpp.

References info().

3013 {
3014  kkint32 t,j;
3015  kkint32 iter = 0;
3016  kkint32 max_iter = Max (100, numClasses);
3017 
3018  double** Q = new double*[numClasses];
3019  double* Qp = new double[numClasses];
3020  double pQp;
3021  double eps = 0.005 / numClasses;
3022 
3023  for (t = 0; t < numClasses; ++t)
3024  {
3025  classProb[t] = 1.0 / numClasses; // Valid if k = 1
3026  Q[t] = new double[numClasses];
3027  for (kkint32 i = 0; i < numClasses; ++i)
3028  Q[t][i] = 0;
3029 
3030  Q[t][t] = 0;
3031  for (j = 0; j < t; j++)
3032  {
3033  Q[t][t] += pairwiseProbs[j][t] * pairwiseProbs[j][t];
3034  Q[t][j] = Q[j][t];
3035  }
3036 
3037  for (j = t + 1; j < numClasses; ++j)
3038  {
3039  Q[t][t] += pairwiseProbs[j][t] * pairwiseProbs[j][t];
3040  Q[t][j] =- pairwiseProbs[j][t] * pairwiseProbs[t][j];
3041  }
3042  }
3043 
3044  for (iter = 0; iter < max_iter; iter++)
3045  {
3046  // stopping condition, recalculate QP,pQP for numerical accuracy
3047  pQp = 0;
3048  for (t = 0; t < numClasses; t++)
3049  {
3050  Qp[t] = 0;
3051  for (j = 0; j < numClasses; j++)
3052  Qp[t] += Q[t][j] * classProb[j];
3053  pQp += classProb[t] * Qp[t];
3054  }
3055  double max_error = 0;
3056  for (t = 0; t < numClasses; ++t)
3057  {
3058  double error = fabs (Qp[t] - pQp);
3059  if (error > max_error)
3060  max_error = error;
3061  }
3062 
3063  if (max_error < eps)
3064  break;
3065 
3066  for (t = 0; t < numClasses; ++t)
3067  {
3068  double diff = (-Qp[t] +pQp) / Q[t][t];
3069  classProb[t] += diff;
3070  pQp = (pQp + diff * (diff * Q[t][t] + 2 * Qp[t])) / (1 + diff) / (1 + diff);
3071  for (j = 0; j < numClasses; ++j)
3072  {
3073  Qp[j] = (Qp[j] + diff * Q[t][j]) / (1 + diff);
3074  classProb[j] /= (1 + diff);
3075  }
3076  }
3077  }
3078  if (iter >= max_iter)
3079  info ("Exceeds max_iter in multiclass_prob\n");
3080 
3081  for (t = 0; t < numClasses; ++t)
3082  {delete Q[t]; Q[t] = NULL;}
3083 
3084  delete[] Q; Q = NULL;
3085  delete[] Qp; Qp = NULL;
3086 } /* multiclass_probability */
__int32 kkint32
Definition: KKBaseTypes.h:88
static void info(const char *fmt,...)
Definition: svm2.cpp:606
T Max(T a, T b)
generic Max function, Both parameters must be of the same type.
Definition: KKBaseTypes.h:181
double SVM289_MFS::powi ( double  base,
kkint32  times 
)
inline

Definition at line 94 of file svm2.cpp.

Referenced by SVM289_MFS::Kernel::k_function().

95  {
96  double tmp = base, ret = 1.0;
97 
98  for (kkint32 t = times; t > 0; t /= 2)
99  {
100  if ((t % 2) == 1)
101  ret *= tmp;
102  tmp = tmp * tmp;
103  }
104  return ret;
105  }
__int32 kkint32
Definition: KKBaseTypes.h:88
double SVM289_MFS::sigmoid_predict ( double  decision_value,
double  A,
double  B 
)

Definition at line 2986 of file svm2.cpp.

2990 {
2991  double fApB = decision_value * A + B;
2992  if (fApB >= 0)
2993  return exp (-fApB) / (1.0 + exp (-fApB));
2994  else
2995  return 1.0 / (1 + exp (fApB));
2996 } /* sigmoid_predict */
void SVM289_MFS::sigmoid_train ( kkint32  numExamples,
const double *  dec_values,
const double *  labels,
double &  A,
double &  B 
)

Definition at line 2850 of file svm2.cpp.

References info().

Referenced by svm_binary_svc_probability().

2856 {
2857  double prior1 = 0;
2858  double prior0 = 0;
2859  kkint32 i;
2860 
2861  for (i = 0; i < numExamples; ++i)
2862  {
2863  if (labels[i] > 0)
2864  prior1 += 1;
2865  else
2866  prior0 += 1;
2867  }
2868 
2869  kkint32 max_iter = 100; // Maximal number of iterations
2870  double min_step = 1e-10; // Minimal step taken in line search
2871  double sigma = 1e-12; // For numerically strict PD of Hessian
2872  double eps = 1e-5;
2873  double hiTarget = (prior1 + 1.0) / (prior1 + 2.0);
2874  double loTarget = 1 / (prior0 + 2.0);
2875  double* t = new double[numExamples];
2876  double fApB, p, q, h11, h22, h21, g1, g2, det, dA, dB, gd, stepsize;
2877  double newA, newB, newf, d1, d2;
2878  kkint32 iter;
2879 
2880  // Initial Point and Initial Fun Value
2881  A = 0.0;
2882  B = log ((prior0 + 1.0) / (prior1 + 1.0));
2883  double fval = 0.0;
2884 
2885  for (i = 0; i < numExamples; ++i)
2886  {
2887  if (labels[i] > 0)
2888  t[i] = hiTarget;
2889  else
2890  t[i] = loTarget;
2891 
2892  fApB = dec_values[i] * A + B;
2893 
2894  if (fApB >= 0)
2895  fval += t[i] * fApB + log (1 + exp (-fApB));
2896  else
2897  fval += (t[i] - 1) * fApB + log (1 + exp (fApB));
2898  }
2899 
2900  for (iter=0; iter < max_iter; iter++)
2901  {
2902  // Update Gradient and Hessian (use H' = H + sigma I)
2903  h11 = sigma; // numerically ensures strict PD
2904  h22 = sigma;
2905  h21 = 0.0;
2906  g1 = 0.0;
2907  g2 = 0.0;
2908  for (i = 0; i < numExamples; i++)
2909  {
2910  fApB = dec_values[i] * A + B;
2911  if (fApB >= 0)
2912  {
2913  p = exp (-fApB) / (1.0 + exp(-fApB));
2914  q = 1.0 / (1.0 + exp(-fApB));
2915  }
2916  else
2917  {
2918  p = 1.0 / (1.0 + exp (fApB));
2919  q = exp (fApB) / (1.0 + exp (fApB));
2920  }
2921 
2922  d2 = p * q;
2923  h11 += dec_values[i] * dec_values[i] * d2;
2924  h22 += d2;
2925  h21 += dec_values[i] * d2;
2926  d1 = t[i] - p;
2927  g1 += dec_values[i] * d1;
2928  g2 += d1;
2929  }
2930 
2931  // Stopping Criteria
2932  if ((fabs (g1) < eps) && (fabs(g2) < eps))
2933  break;
2934 
2935  // Finding Newton direction: -inv(H') * g
2936  det = h11 * h22 - h21 * h21;
2937  dA = -(h22*g1 - h21 * g2) / det;
2938  dB = -(-h21 * g1 + h11 * g2) / det;
2939  gd = g1 * dA + g2 * dB;
2940 
2941 
2942  stepsize = 1; // Line Search
2943  while (stepsize >= min_step)
2944  {
2945  newA = A + stepsize * dA;
2946  newB = B + stepsize * dB;
2947 
2948  // New function value
2949  newf = 0.0;
2950  for (i = 0; i < numExamples; i++)
2951  {
2952  fApB = dec_values[i] * newA + newB;
2953  if (fApB >= 0)
2954  newf += t[i] * fApB + log (1 + exp (-fApB));
2955  else
2956  newf += (t[i] - 1) * fApB + log (1 + exp (fApB));
2957  }
2958 
2959  // Check sufficient decrease
2960  if (newf < fval + 0.0001 * stepsize * gd)
2961  {
2962  A = newA;
2963  B = newB;
2964  fval = newf;
2965  break;
2966  }
2967  else
2968  stepsize = stepsize / 2.0;
2969  }
2970 
2971  if (stepsize < min_step)
2972  {
2973  info("Line search fails in two-class probability estimates\n");
2974  break;
2975  }
2976  }
2977 
2978  if (iter >= max_iter)
2979  info ("Reaching maximal iterations in two-class probability estimates\n");
2980 
2981  delete[] t; t = NULL;
2982 } /* sigmoid_train */
__int32 kkint32
Definition: KKBaseTypes.h:88
static void info(const char *fmt,...)
Definition: svm2.cpp:606
void SVM289_MFS::solve_c_svc ( const svm_problem prob,
const svm_parameter param,
double *  alpha,
Solver::SolutionInfo si,
double  Cp,
double  Cn,
RunLog _log 
)

Definition at line 2414 of file svm2.cpp.

References SVM289_MFS::svm_parameter::eps, SVM289_MFS::svm_problem::numTrainExamples, SVM289_MFS::svm_parameter::shrinking, SVM289_MFS::Solver::Solve(), SVM289_MFS::SVC_Q::SVC_Q(), and SVM289_MFS::svm_problem::y.

Referenced by svm_train_one().

2422 {
2423 
2424  kkint32 numTrainExamples = prob->numTrainExamples;
2425  double* minus_ones = new double[numTrainExamples];
2426  schar* y = new schar [numTrainExamples];
2427 
2428  kkint32 i;
2429 
2430  for (i = 0; i < numTrainExamples; ++i)
2431  {
2432  alpha[i] = 0;
2433  minus_ones[i] = -1;
2434  if (prob->y[i] > 0)
2435  y[i] = +1;
2436  else
2437  y[i] = -1;
2438  }
2439 
2440  Solver s;
2441 
2442  SVC_Q* jester = new SVC_Q (*prob, *param, y, _log);
2443 
2444  s.Solve (numTrainExamples,
2445  *jester,
2446  minus_ones,
2447  y,
2448  alpha,
2449  Cp,
2450  Cn,
2451  param->eps,
2452  si,
2453  param->shrinking
2454  );
2455  delete jester;
2456  jester = NULL;
2457 
2458  double sum_alpha =0;
2459 
2460  for (i = 0; i < numTrainExamples; i++)
2461  sum_alpha += alpha[i];
2462 
2463  //if (Cp == Cn)
2464  // info ("nu = %f\n", sum_alpha / (Cp * prob->numTrainExamples));
2465 
2466  for (i = 0; i < numTrainExamples; i++)
2467  alpha[i] *= y[i];
2468 
2469  delete[] minus_ones;
2470  delete[] y;
2471 } /* solve_c_svc */
__int32 kkint32
Definition: KKBaseTypes.h:88
void Solve(kkint32 l, QMatrix &Q, const double *p_, const schar *y_, double *alpha_, double Cp, double Cn, double eps, SolutionInfo *si, kkint32 shrinking)
Definition: svm2.cpp:1278
signed char schar
Definition: svm2.h:289
kkint32 numTrainExamples
Definition: svm2.h:63
void SVM289_MFS::solve_epsilon_svr ( const svm_problem prob,
const svm_parameter param,
double *  alpha,
Solver::SolutionInfo si,
RunLog _log 
)

Definition at line 2613 of file svm2.cpp.

References SVM289_MFS::svm_parameter::C, SVM289_MFS::svm_parameter::eps, info(), SVM289_MFS::svm_problem::numTrainExamples, SVM289_MFS::svm_parameter::p, SVM289_MFS::svm_parameter::shrinking, SVM289_MFS::Solver::Solve(), SVM289_MFS::SVR_Q::SVR_Q(), and SVM289_MFS::svm_problem::y.

Referenced by svm_train_one().

2619 {
2620  kkint32 numTrainExamples = prob->numTrainExamples;
2621  double* alpha2 = new double [2 * numTrainExamples];
2622  double* linear_term = new double [2 * numTrainExamples];
2623  schar* y = new schar[2 * numTrainExamples];
2624  kkint32 i;
2625 
2626  for (i = 0; i < numTrainExamples; ++i)
2627  {
2628  alpha2[i] = 0;
2629  linear_term[i] = param->p - prob->y[i];
2630  y[i] = 1;
2631 
2632  alpha2 [i + numTrainExamples] = 0;
2633  linear_term [i + numTrainExamples] = param->p + prob->y[i];
2634  y [i + numTrainExamples] = -1;
2635  }
2636 
2637 
2638  SVR_Q* jester = new SVR_Q (*prob, *param, _log);
2639  Solver s;
2640  s.Solve (2 * numTrainExamples,
2641  *jester,
2642  linear_term,
2643  y,
2644  alpha2,
2645  param->C,
2646  param->C,
2647  param->eps,
2648  si,
2649  param->shrinking
2650  );
2651 
2652  delete jester;
2653  jester = NULL;
2654 
2655  double sum_alpha = 0;
2656  for (i = 0; i < numTrainExamples; i++)
2657  {
2658  alpha[i] = alpha2[i] - alpha2[i + numTrainExamples];
2659  sum_alpha += fabs (alpha[i]);
2660  }
2661 
2662  info ("nu = %f\n", sum_alpha / (param->C * numTrainExamples));
2663 
2664  delete[] alpha2;
2665  delete[] linear_term;
2666  delete[] y;
2667 } /* solve_epsilon_svr */
__int32 kkint32
Definition: KKBaseTypes.h:88
static void info(const char *fmt,...)
Definition: svm2.cpp:606
void Solve(kkint32 l, QMatrix &Q, const double *p_, const schar *y_, double *alpha_, double Cp, double Cn, double eps, SolutionInfo *si, kkint32 shrinking)
Definition: svm2.cpp:1278
signed char schar
Definition: svm2.h:289
kkint32 numTrainExamples
Definition: svm2.h:63
void SVM289_MFS::solve_nu_svc ( const svm_problem prob,
const svm_parameter param,
double *  alpha,
Solver::SolutionInfo si,
RunLog _log 
)

Definition at line 2476 of file svm2.cpp.

References SVM289_MFS::svm_parameter::eps, info(), SVM289_MFS::svm_parameter::nu, SVM289_MFS::svm_problem::numTrainExamples, SVM289_MFS::Solver::SolutionInfo::obj, SVM289_MFS::Solver::SolutionInfo::r, SVM289_MFS::Solver::SolutionInfo::rho, SVM289_MFS::svm_parameter::shrinking, SVM289_MFS::Solver_NU::Solve(), SVM289_MFS::SVC_Q::SVC_Q(), SVM289_MFS::Solver::SolutionInfo::upper_bound_n, SVM289_MFS::Solver::SolutionInfo::upper_bound_p, and SVM289_MFS::svm_problem::y.

Referenced by svm_train_one().

2482 {
2483  kkint32 i;
2484  kkint32 numTrainExamples = prob->numTrainExamples;
2485  double nu = param->nu;
2486 
2487  schar *y = new schar[numTrainExamples];
2488 
2489  for (i = 0; i < numTrainExamples; i++)
2490  {
2491  if (prob->y[i] > 0)
2492  y[i] = +1;
2493  else
2494  y[i] = -1;
2495  }
2496 
2497 
2498  double sum_pos = nu * numTrainExamples / 2;
2499  double sum_neg = nu * numTrainExamples / 2;
2500 
2501  for (i = 0; i < numTrainExamples; i++)
2502  {
2503  if (y[i] == +1)
2504  {
2505  alpha[i] = Min(1.0, sum_pos);
2506  sum_pos -= alpha[i];
2507  }
2508  else
2509  {
2510  alpha[i] = Min(1.0,sum_neg);
2511  sum_neg -= alpha[i];
2512  }
2513  }
2514 
2515  double *zeros = new double[numTrainExamples];
2516 
2517  for (i = 0; i < numTrainExamples; i++)
2518  zeros[i] = 0;
2519 
2521 
2522  SVC_Q* jester = new SVC_Q (*prob, *param, y, _log);
2523 
2524  s.Solve (numTrainExamples,
2525  *jester,
2526  zeros,
2527  y,
2528  alpha,
2529  1.0,
2530  1.0,
2531  param->eps,
2532  si,
2533  param->shrinking
2534  );
2535 
2536  delete jester;
2537  jester = NULL;
2538 
2539  double r = si->r;
2540 
2541  info ("C = %f\n",1/r);
2542 
2543  for (i = 0; i < numTrainExamples; ++i)
2544  alpha[i] *= y[i] / r;
2545 
2546  si->rho /= r;
2547  si->obj /= (r * r);
2548  si->upper_bound_p = 1 / r;
2549  si->upper_bound_n = 1 / r;
2550 
2551  delete[] y;
2552  delete[] zeros;
2553 } /* solve_nu_svc */
__int32 kkint32
Definition: KKBaseTypes.h:88
void Solve(kkint32 l, QMatrix &Q, const double *p, const schar *y, double *alpha, double Cp, double Cn, double eps, SolutionInfo *si, kkint32 shrinking)
Definition: svm2.cpp:1842
static void info(const char *fmt,...)
Definition: svm2.cpp:606
signed char schar
Definition: svm2.h:289
kkint32 numTrainExamples
Definition: svm2.h:63
kkint32 Min(kkint32 x1, kkint32 x2)
Definition: Raster.cpp:229
void SVM289_MFS::solve_one_class ( const svm_problem prob,
const svm_parameter param,
double *  alpha,
Solver::SolutionInfo si,
RunLog _log 
)

Definition at line 2558 of file svm2.cpp.

References SVM289_MFS::svm_parameter::eps, SVM289_MFS::svm_parameter::nu, SVM289_MFS::svm_problem::numTrainExamples, SVM289_MFS::ONE_CLASS_Q::ONE_CLASS_Q(), SVM289_MFS::svm_parameter::shrinking, and SVM289_MFS::Solver::Solve().

Referenced by svm_train_one().

2564 {
2565  kkint32 numTrainExamples = prob->numTrainExamples;
2566 
2567  double* zeros = new double [numTrainExamples];
2568  schar* ones = new schar [numTrainExamples];
2569  kkint32 i;
2570 
2571  kkint32 n = (kkint32)(param->nu * prob->numTrainExamples); // # of alpha's at upper bound
2572 
2573  for (i = 0; i < n; i++)
2574  alpha[i] = 1;
2575 
2576  if (n < prob->numTrainExamples)
2577  alpha[n] = param->nu * prob->numTrainExamples - n;
2578 
2579  for (i = n + 1; i < numTrainExamples; i++)
2580  alpha[i] = 0;
2581 
2582  for (i = 0; i < numTrainExamples; i++)
2583  {
2584  zeros[i] = 0;
2585  ones [i] = 1;
2586  }
2587 
2588  ONE_CLASS_Q* jester = new ONE_CLASS_Q (*prob, *param, _log);
2589 
2590  Solver s;
2591  s.Solve (numTrainExamples,
2592  *jester,
2593  zeros,
2594  ones,
2595  alpha,
2596  1.0,
2597  1.0,
2598  param->eps,
2599  si,
2600  param->shrinking
2601  );
2602 
2603  delete jester;
2604  jester = NULL;
2605 
2606  delete[] zeros;
2607  delete[] ones;
2608 } /* solve_one_class */
__int32 kkint32
Definition: KKBaseTypes.h:88
void Solve(kkint32 l, QMatrix &Q, const double *p_, const schar *y_, double *alpha_, double Cp, double Cn, double eps, SolutionInfo *si, kkint32 shrinking)
Definition: svm2.cpp:1278
signed char schar
Definition: svm2.h:289
kkint32 numTrainExamples
Definition: svm2.h:63
const char* SVM289_MFS::svm_check_parameter ( const struct svm_problem prob,
const struct svm_parameter param 
)
kkint32 SVM289_MFS::svm_check_probability_model ( const struct Svm_Model model)
void SVM289_MFS::svm_cross_validation ( const svm_problem prob,
const svm_parameter param,
kkint32  nr_fold,
double *  target,
RunLog log 
)

Definition at line 3661 of file svm2.cpp.

References C_SVC, SVM289_MFS::svm_problem::FileDesc(), NU_SVC, SVM289_MFS::svm_problem::numTrainExamples, SVM289_MFS::svm_parameter::probability, SVM289_MFS::svm_problem::SelFeatures(), svm_destroy_model(), svm_get_nr_class(), svm_group_classes(), SVM289_MFS::svm_problem::svm_problem(), svm_train(), SVM289_MFS::svm_parameter::svm_type, and SVM289_MFS::svm_problem::y.

Referenced by svm_svr_probability().

3667 {
3668  kkint32 i;
3669  kkint32 *fold_start = new kkint32[nr_fold + 1];
3670  kkint32 numTrainExamples = prob.numTrainExamples;
3671  kkint32 *perm = new kkint32[numTrainExamples];
3672  kkint32 nr_class;
3673 
3674  // stratified cv may not give leave-one-out rate
3675  // Each class to l folds -> some folds may have zero elements
3676  if ((param.svm_type == SVM_Type::C_SVC || param.svm_type == SVM_Type::NU_SVC) &&
3677  (nr_fold < numTrainExamples)
3678  )
3679  {
3680  kkint32 *start = NULL;
3681  kkint32 *label = NULL;
3682  kkint32 *count = NULL;
3683  svm_group_classes (&prob, &nr_class, &label, &start, &count, perm);
3684 
3685  // random shuffle and then data grouped by fold using the array perm
3686  kkint32 *fold_count = new kkint32[nr_fold];
3687  kkint32 c;
3688  kkint32 *index = new kkint32[numTrainExamples];
3689  for (i = 0; i < numTrainExamples; i++)
3690  index[i] = perm[i];
3691 
3692  for (c = 0; c < nr_class; c++)
3693  {
3694  for (i = 0; i < count[c]; i++)
3695  {
3696  kkint32 j = i + rand() % (count[c]-i);
3697  SVM289_MFS::swap (index[start[c]+j], index[start[c]+i]);
3698  }
3699  }
3700 
3701  for (i = 0; i < nr_fold; ++i)
3702  {
3703  fold_count[i] = 0;
3704  for (c = 0; c < nr_class; ++c)
3705  fold_count[i] += (i + 1) * count[c] / nr_fold - i * count[c] / nr_fold;
3706  }
3707 
3708  fold_start[0] = 0;
3709  for (i = 1; i <= nr_fold; i++)
3710  fold_start[i] = fold_start[i-1] + fold_count[i-1];
3711 
3712  for (c=0; c<nr_class;c++)
3713  {
3714  for(i=0;i<nr_fold;i++)
3715  {
3716  kkint32 begin = start[c]+i*count[c]/nr_fold;
3717  kkint32 end = start[c]+(i+1)*count[c]/nr_fold;
3718  for(kkint32 j=begin;j<end;j++)
3719  {
3720  perm[fold_start[i]] = index[j];
3721  fold_start[i]++;
3722  }
3723  }
3724  }
3725 
3726  fold_start[0]=0;
3727  for (i=1;i<=nr_fold;i++)
3728  fold_start[i] = fold_start[i-1]+fold_count[i-1];
3729 
3730  delete start; start = NULL;
3731  delete label; label = NULL;
3732  delete count; count = NULL;
3733  delete index; index = NULL;
3734  delete fold_count; fold_count = NULL;
3735  }
3736  else
3737  {
3738  for (i = 0; i < numTrainExamples; ++i)
3739  perm[i]=i;
3740 
3741  for (i = 0; i < numTrainExamples; ++i)
3742  {
3743  kkint32 j = i + rand() % (numTrainExamples - i);
3744  SVM289_MFS::swap (perm[i], perm[j]);
3745  }
3746  for (i = 0; i <= nr_fold; i++)
3747  fold_start[i] = i * numTrainExamples / nr_fold;
3748  }
3749 
3750  for (i = 0; i < nr_fold; i++)
3751  {
3752  kkint32 begin = fold_start[i];
3753  kkint32 end = fold_start[i+1];
3754  kkint32 j,k;
3755 
3756  svm_problem subprob (prob.SelFeatures (), prob.FileDesc (), log);
3757 
3758  subprob.numTrainExamples = numTrainExamples - (end - begin);
3759  //subprob.x = Malloc(struct svm_node*,subprob.l);
3760  // subprob.x will be initialized to an empty FeatureVectorList
3761  subprob.y = new double[subprob.numTrainExamples];
3762 
3763  k = 0;
3764  for (j = 0; j < begin; j++)
3765  {
3766  //subprob.x[k] = prob->x[perm[j]];
3767  subprob.x.PushOnBack (prob.x.IdxToPtr (perm[j]));
3768  subprob.y[k] = prob.y[perm[j]];
3769  ++k;
3770  }
3771 
3772  for (j = end; j < numTrainExamples; j++)
3773  {
3774  //subprob.x[k] = prob->x[perm[j]];
3775  subprob.x.PushOnBack (prob.x.IdxToPtr (perm[j]));
3776  subprob.y[k] = prob.y[perm[j]];
3777  ++k;
3778  }
3779 
3780  Svm_Model* submodel = svm_train (subprob, param, log);
3781  if (param.probability &&
3782  (param.svm_type == SVM_Type::C_SVC || param.svm_type == SVM_Type::NU_SVC))
3783  {
3784  double *prob_estimates = new double[svm_get_nr_class (submodel)];
3785  kkint32 *votes = new kkint32 [svm_get_nr_class (submodel)];
3786 
3787  for (j = begin; j < end; j++)
3788  target[perm[j]] = svm_predict_probability (submodel, prob.x[perm[j]], prob_estimates, votes);
3789  delete prob_estimates;
3790  prob_estimates = NULL;
3791  delete votes;
3792  votes = NULL;
3793  }
3794  else
3795  {
3796  for (j = begin; j < end; j++)
3797  target[perm[j]] = svm_predict (submodel, prob.x[perm[j]]);
3798  }
3799 
3800  svm_destroy_model (submodel);
3801  delete submodel;
3802  submodel = NULL;
3803 
3804  //free(subprob.x);
3805  delete subprob.y; subprob.y = NULL;
3806  }
3807 
3808  delete fold_start; fold_start = NULL;
3809  delete perm; perm = NULL;
3810 } /* svm_cross_validation */
double svm_predict_probability(Svm_Model *model, const FeatureVector &x, double *prob_estimates, kkint32 *votes)
Definition: svm2.cpp:3988
FeatureVectorList x
Definition: svm2.h:65
__int32 kkint32
Definition: KKBaseTypes.h:88
FileDescPtr FileDesc() const
Definition: svm2.cpp:170
EntryPtr IdxToPtr(kkuint32 idx) const
Definition: KKQueue.h:732
struct SvmModel233 * svm_train(const struct svm_problem *prob, const struct svm_parameter *param)
void swap(T &x, T &y)
Definition: svm2.h:283
double svm_predict(const struct SvmModel233 *model, const struct svm_node *x)
kkint32 svm_get_nr_class(const SvmModel233 *model)
Definition: svm.cpp:3672
void svm_group_classes(const svm_problem *prob, kkint32 *nr_class_ret, kkint32 **label_ret, kkint32 **start_ret, kkint32 **count_ret, kkint32 *perm)
Definition: svm2.cpp:3271
const FeatureNumList & SelFeatures() const
Definition: svm2.h:61
kkint32 numTrainExamples
Definition: svm2.h:63
void svm_destroy_model(struct SvmModel233 *model)
Definition: svm.cpp:4442
void SVM289_MFS::svm_destroy_model ( struct Svm_Model *&  model)

Definition at line 4611 of file svm2.cpp.

Referenced by svm_cross_validation(), KKMLL::ModelSvmBase::TrainModel(), and KKMLL::ModelSvmBase::~ModelSvmBase().

4612 {
4613  //if (model->weOwnSupportVectors && (model->l > 0))
4614  // free ((void *)(model->SV[0]));
4615  if (model->weOwnSupportVectors)
4616  model->SV.Owner (true);
4617  else
4618  model->SV.Owner (false);
4619 
4620  delete model;
4621  model = NULL;
4622 }
bool Owner() const
Definition: KKQueue.h:305
FeatureVectorList SV
Definition: svm2.h:207
bool weOwnSupportVectors
Definition: svm2.h:220
void SVM289_MFS::svm_destroy_param ( struct svm_parameter *&  param)

Definition at line 4627 of file svm2.cpp.

4628 {
4629  delete param;
4630  param = NULL;
4631 }
void SVM289_MFS::svm_get_labels ( const struct Svm_Model model,
kkint32 label 
)
kkint32 SVM289_MFS::svm_get_nr_class ( const struct Svm_Model model)

Referenced by svm_cross_validation().

kkint32 SVM289_MFS::svm_get_svm_type ( const struct Svm_Model model)
double SVM289_MFS::svm_get_svr_probability ( const struct Svm_Model model)
double SVM289_MFS::svm_predict ( const struct Svm_Model model,
const FeatureVector x 
)
double SVM289_MFS::svm_predict_probability ( Svm_Model model,
const FeatureVector x,
double *  prob_estimates,
kkint32 votes 
)

Definition at line 3988 of file svm2.cpp.

References C_SVC, SVM289_MFS::Svm_Model::DecValues(), SVM289_MFS::Svm_Model::label, SVM289_MFS::Svm_Model::NormalizeProbability(), SVM289_MFS::Svm_Model::nr_class, NU_SVC, SVM289_MFS::Svm_Model::PairwiseProb(), SVM289_MFS::Svm_Model::param, SVM289_MFS::Svm_Model::probA, SVM289_MFS::Svm_Model::probB, SVM289_MFS::Svm_Model::ProbEstimates(), SVM289_MFS::svm_parameter::probParam, svm_predict(), svm_predict_values(), and SVM289_MFS::svm_parameter::svm_type.

Referenced by KKMLL::ModelSvmBase::Predict(), and KKMLL::ModelSvmBase::ProbabilitiesByClass().

3993 {
3994  double probParam = model->param.probParam;
3995 
3996  if ((model->param.svm_type == SVM_Type::C_SVC || model->param.svm_type == SVM_Type::NU_SVC) &&
3997  ((model->probA != NULL && model->probB != NULL) || (probParam > 0.0))
3998  )
3999  {
4000  kkint32 i;
4001  kkint32 nr_class = model->nr_class;
4002 
4003  double* prob_estimates = model->ProbEstimates ();
4004  double* dec_values = model->DecValues ();
4005  double** pairwise_prob = model->PairwiseProb ();
4006 
4007  for (i = 0; i < nr_class; ++i)
4008  votes[i] = 0;
4009 
4010  svm_predict_values (model, x, dec_values);
4011 
4012  double min_prob = 1e-7;
4013 
4014  kkint32 k = 0;
4015  for (i = 0; i < nr_class; ++i)
4016  {
4017  for (kkint32 j = i + 1; j < nr_class; ++j)
4018  {
4019  if (probParam > 0.0)
4020  {
4021  double probability = (double)(1.0 / (1.0 + exp (-1.0 * probParam * dec_values[k])));
4022  pairwise_prob[i][j] = Min (Max (probability, min_prob), 1.0 - min_prob);
4023  pairwise_prob[j][i] = 1.0 - pairwise_prob[i][j];
4024  }
4025  else
4026  {
4027  pairwise_prob[i][j] = Min (Max (sigmoid_predict (dec_values[k], model->probA[k], model->probB[k]), min_prob), 1.0 - min_prob);
4028  pairwise_prob[j][i] = 1.0 - pairwise_prob[i][j];
4029  }
4030 
4031  if (pairwise_prob[i][j] > 0.5)
4032  votes[model->label[i]]++;
4033  else
4034  votes[model->label[j]]++;
4035 
4036  k++;
4037  }
4038  }
4039 
4040  // The 'pairwise_prob' and 'prob_estimates' variables are actually located
4041  // in 'model'. So by calling 'NormalizeProbability' we normalize
4042  // 'prob_estimates'.
4043  model->NormalizeProbability ();
4044 
4045  //multiclass_probability (nr_class, pairwise_prob, prob_estimates);
4046 
4047  kkint32 prob_max_idx = 0;
4048  for (i = 1; i < nr_class; i++)
4049  {
4050  if (prob_estimates[i] > prob_estimates[prob_max_idx])
4051  prob_max_idx = i;
4052  }
4053 
4054  for (i = 0; i < nr_class; i++)
4055  classProbabilities[model->label[i]] = prob_estimates[i];
4056 
4057  return model->label[prob_max_idx];
4058  }
4059  else
4060  {
4061  return svm_predict (model, x);
4062  }
4063 } /* svm_predict_probability */
__int32 kkint32
Definition: KKBaseTypes.h:88
svm_parameter param
Definition: svm2.h:204
kkint32 nr_class
Definition: svm2.h:205
double * probB
Definition: svm2.h:211
double ** PairwiseProb()
Definition: svm2.cpp:4319
void NormalizeProbability()
Derives multi-class probability.
Definition: svm2.cpp:4581
double svm_predict(const struct SvmModel233 *model, const struct svm_node *x)
kkint32 * label
Definition: svm2.h:216
double sigmoid_predict(double decision_value, double A, double B)
Definition: svm2.cpp:2986
double * ProbEstimates()
Definition: svm2.cpp:4311
T Max(T a, T b)
generic Max function, Both parameters must be of the same type.
Definition: KKBaseTypes.h:181
kkint32 Min(kkint32 x1, kkint32 x2)
Definition: Raster.cpp:229
double * probA
Definition: svm2.h:210
double * DecValues()
Definition: svm2.cpp:4303
void svm_predict_values(const Svm_Model *model, const FeatureVector &x, double *dec_values)
Definition: svm2.cpp:3856
void SVM289_MFS::svm_predict_values ( const Svm_Model model,
const FeatureVector x,
double *  dec_values 
)

Definition at line 3856 of file svm2.cpp.

References EPSILON_SVR, SVM289_MFS::Svm_Model::nr_class, SVM289_MFS::Svm_Model::nSV, NU_SVR, SVM289_MFS::Svm_Model::numSVs, ONE_CLASS, SVM289_MFS::Svm_Model::param, SVM289_MFS::Svm_Model::rho, SVM289_MFS::Svm_Model::sv_coef, and SVM289_MFS::svm_parameter::svm_type.

Referenced by svm_predict_probability().

3860 {
3861  if (model->param.svm_type == SVM_Type::ONE_CLASS ||
3862  model->param.svm_type == SVM_Type::EPSILON_SVR ||
3863  model->param.svm_type == SVM_Type::NU_SVR
3864  )
3865  {
3866  double *sv_coef = model->sv_coef[0];
3867  double sum = 0;
3868  for (kkint32 i = 0; i < model->numSVs; i++)
3869  sum += sv_coef[i] * Kernel::k_function (x,
3870  model->SV[i],
3871  model->param,
3872  model->selFeatures
3873  );
3874  sum -= model->rho[0];
3875  *dec_values = sum;
3876  }
3877  else
3878  {
3879  kkint32 i;
3880  kkint32 nr_class = model->nr_class;
3881  kkint32 numSVs = model->numSVs;
3882 
3883  double *kvalue = new double[numSVs];
3884  for (i = 0; i < numSVs; i++)
3885  kvalue[i] = Kernel::k_function (x, model->SV[i], model->param, model->selFeatures);
3886 
3887  kkint32 *start = new kkint32[nr_class];
3888  start[0] = 0;
3889  for (i = 1; i < nr_class; i++)
3890  start[i] = start[i-1]+model->nSV[i-1];
3891 
3892  kkint32 p=0;
3893  for (i = 0; i < nr_class; i++)
3894  {
3895  for (kkint32 j = i + 1; j < nr_class; j++)
3896  {
3897  double sum = 0;
3898  kkint32 si = start[i];
3899  kkint32 sj = start[j];
3900  kkint32 ci = model->nSV[i];
3901  kkint32 cj = model->nSV[j];
3902 
3903  kkint32 k;
3904  double *coef1 = model->sv_coef[j - 1];
3905  double *coef2 = model->sv_coef[i];
3906  for (k = 0; k < ci; k++)
3907  sum += coef1[si + k] * kvalue[si + k];
3908 
3909 
3910  for (k = 0; k < cj; k++)
3911  sum += coef2[sj + k] * kvalue[sj + k];
3912 
3913  sum -= model->rho[p];
3914  dec_values[p] = sum;
3915  p++;
3916  }
3917  }
3918 
3919  delete kvalue; kvalue = NULL;
3920  delete start; start = NULL;
3921  }
3922 } /* svm_predict_values */
double ** sv_coef
Definition: svm2.h:208
__int32 kkint32
Definition: KKBaseTypes.h:88
FeatureNumList selFeatures
Definition: svm2.h:212
svm_parameter param
Definition: svm2.h:204
kkint32 nr_class
Definition: svm2.h:205
kkint32 * nSV
Definition: svm2.h:217
FeatureVectorList SV
Definition: svm2.h:207
kkint32 numSVs
Definition: svm2.h:206
double * rho
Definition: svm2.h:209
Svm_Model * SVM289_MFS::svm_train ( const svm_problem prob,
const svm_parameter param,
RunLog log 
)

Definition at line 3345 of file svm2.cpp.

References SVM289_MFS::decision_function::alpha, SVM289_MFS::svm_parameter::C, EPSILON_SVR, KKMLL::FeatureVectorList::FeatureVectorList(), SVM289_MFS::svm_problem::FileDesc(), info(), SVM289_MFS::Svm_Model::label, SVM289_MFS::Svm_Model::nr_class, SVM289_MFS::svm_parameter::nr_weight, SVM289_MFS::Svm_Model::nSV, NU_SVR, SVM289_MFS::Svm_Model::numSVs, SVM289_MFS::svm_problem::numTrainExamples, ONE_CLASS, SVM289_MFS::Svm_Model::probA, SVM289_MFS::svm_parameter::probability, SVM289_MFS::Svm_Model::probB, SVM289_MFS::Svm_Model::rho, SVM289_MFS::decision_function::rho, SVM289_MFS::svm_problem::SelFeatures(), SVM289_MFS::Svm_Model::sv_coef, svm_binary_svc_probability(), svm_group_classes(), SVM289_MFS::Svm_Model::Svm_Model(), SVM289_MFS::svm_problem::svm_problem(), svm_svr_probability(), svm_train_one(), SVM289_MFS::svm_parameter::svm_type, SVM289_MFS::svm_parameter::weight, SVM289_MFS::svm_parameter::weight_label, SVM289_MFS::Svm_Model::weOwnSupportVectors, and SVM289_MFS::svm_problem::y.

Referenced by svm_binary_svc_probability(), svm_cross_validation(), and KKMLL::ModelSvmBase::TrainModel().

3349 {
3350  Svm_Model* model = new Svm_Model (param, prob.SelFeatures (), prob.FileDesc ());
3351 
3352  model->weOwnSupportVectors = false;
3353 
3354  if ((param.svm_type == SVM_Type::ONE_CLASS) ||
3355  (param.svm_type == SVM_Type::EPSILON_SVR) ||
3356  (param.svm_type == SVM_Type::NU_SVR)
3357  )
3358  {
3359  // regression or one-class-svm
3360  model->nr_class = 2;
3361  model->label = NULL;
3362  model->nSV = NULL;
3363  model->probA = NULL;
3364  model->probB = NULL;
3365  model->sv_coef = new double*[1];
3366 
3367  if (param.probability && (param.svm_type == SVM_Type::EPSILON_SVR || param.svm_type == SVM_Type::NU_SVR))
3368  {
3369  model->probA = new double[1];
3370  model->probA[0] = svm_svr_probability (prob, param, log);
3371  }
3372 
3373  decision_function f = svm_train_one (prob, param, 0, 0, log);
3374  model->rho = new double[1];
3375  model->rho[0] = f.rho;
3376 
3377  kkint32 nSV = 0;
3378  kkint32 i;
3379  for (i = 0; i < prob.numTrainExamples; ++i)
3380  {
3381  if (fabs(f.alpha[i]) > 0)
3382  ++nSV;
3383  }
3384 
3385  model->numSVs = nSV;
3386  //model->SV = Malloc(svm_node *,nSV);
3387  // model->SV is now a FeatureVectorList object that was initialized to empty and not owner in the constructor
3388  model->SV.Owner (true);
3389  model->sv_coef[0] = new double[nSV];
3390  kkint32 j = 0;
3391  for (i = 0; i < prob.numTrainExamples; ++i)
3392  {
3393  if (fabs (f.alpha[i]) > 0)
3394  {
3395  //model->SV[j] = prob->x[i];
3396  model->SV.PushOnBack (new FeatureVector (prob.x[i]));
3397  model->sv_coef[0][j] = f.alpha[i];
3398  ++j;
3399  }
3400  }
3401 
3402  delete f.alpha; f.alpha = NULL;
3403  }
3404  else
3405  {
3406  // Classification
3407  kkint32 l = prob.numTrainExamples;
3408  kkint32 nr_class;
3409  kkint32 *label = NULL;
3410  kkint32 *start = NULL;
3411  kkint32 *count = NULL;
3412  kkint32 *perm = new kkint32[l];
3413 
3414  // group training data of the same class
3415  svm_group_classes (&prob,
3416  &nr_class,
3417  &label,
3418  &start,
3419  &count,
3420  perm
3421  );
3422 
3423  kkint32 numBinaryCombos = nr_class * (nr_class - 1) / 2;
3424 
3425  //svm_node **x = Malloc(svm_node *,l);
3426  FeatureVectorList x (prob.FileDesc (), false);
3427 
3428  kkint32 i;
3429  for (i = 0; i < l; i++)
3430  {
3431  //x[i] = prob->x[perm[i]];
3432  x.PushOnBack (prob.x.IdxToPtr (perm[i]));
3433  }
3434 
3435  // calculate weighted C
3436  double* weighted_C = new double[nr_class];
3437  for (i = 0; i < nr_class; i++)
3438  weighted_C[i] = param.C;
3439 
3440  for (i = 0; i < param.nr_weight; i++)
3441  {
3442  kkint32 j;
3443  for (j = 0; j < nr_class; j++)
3444  {
3445  if (param.weight_label[i] == label[j])
3446  break;
3447  }
3448 
3449  if (j == nr_class)
3450  fprintf(stderr,"warning: class label %d specified in weight is not found\n", param.weight_label[i]);
3451  else
3452  weighted_C[j] *= param.weight[i];
3453  }
3454 
3455  // train k*(k-1)/2 models
3456 
3457  bool *nonzero = new bool[l];
3458 
3459  for (i = 0; i < l; i++)
3460  nonzero[i] = false;
3461 
3462  decision_function *f = new decision_function[numBinaryCombos];
3463 
3464  double* probA = NULL;
3465  double* probB = NULL;
3466 
3467  if (param.probability)
3468  {
3469  probA = new double[numBinaryCombos];
3470  probB = new double[numBinaryCombos];
3471  }
3472 
3473  kkint32 p = 0;
3474  for (i = 0; i < nr_class; i++)
3475  {
3476  for (kkint32 j = i + 1; j < nr_class; j++)
3477  {
3478  svm_problem sub_prob (prob.SelFeatures (), prob.FileDesc (), log);
3479  kkint32 si = start[i], sj = start[j];
3480  kkint32 ci = count[i], cj = count[j];
3481  sub_prob.numTrainExamples = ci + cj;
3482  //sub_prob.x = Malloc (svm_node *,sub_prob.l);
3483  sub_prob.y = new double[sub_prob.numTrainExamples];
3484  kkint32 k;
3485  for (k = 0; k < ci; k++)
3486  {
3487  //sub_prob.x[k] = x[si+k];
3488  sub_prob.x.PushOnBack (x.IdxToPtr (si + k));
3489  sub_prob.y[k] = +1;
3490  }
3491  for (k = 0; k < cj; k++)
3492  {
3493  //sub_prob.x[ci+k] = x[sj+k];
3494  sub_prob.x.PushOnBack (x.IdxToPtr (sj + k));
3495  sub_prob.y[ci + k] = -1;
3496  }
3497 
3498  if (param.probability)
3499  svm_binary_svc_probability (&sub_prob, &param, weighted_C[i], weighted_C[j], probA[p], probB[p], log);
3500 
3501 
3502  f[p] = svm_train_one (sub_prob, param, weighted_C[i], weighted_C[j], log);
3503 
3504  for (k = 0; k < ci; k++)
3505  {
3506  if (!nonzero[si + k] && fabs(f[p].alpha[k]) > 0)
3507  nonzero[si + k] = true;
3508  }
3509 
3510  for (k = 0; k < cj; k++)
3511  {
3512  if (!nonzero[sj + k] && fabs(f[p].alpha[ci+k]) > 0)
3513  nonzero[sj + k] = true;
3514  }
3515 
3516  //free(sub_prob.x);
3517  delete sub_prob.y;
3518  sub_prob.y = NULL;
3519  ++p;
3520  }
3521  }
3522 
3523 
3524  // At this point all the Binary Classifiers have been built. They are now going
3525  // to be packaged into one not so neat model.
3526 
3527  // build output
3528  model->nr_class = nr_class;
3529 
3530  model->label = new kkint32[nr_class];
3531  for (i = 0; i < nr_class; i++)
3532  model->label[i] = label[i];
3533 
3534  model->rho = new double[numBinaryCombos];
3535  for (i = 0; i < numBinaryCombos; i++)
3536  model->rho[i] = f[i].rho;
3537 
3538  if (param.probability)
3539  {
3540  model->probA = new double[numBinaryCombos];
3541  model->probB = new double[numBinaryCombos];
3542  for (i = 0; i < numBinaryCombos; i++)
3543  {
3544  model->probA[i] = probA[i];
3545  model->probB[i] = probB[i];
3546  }
3547  }
3548  else
3549  {
3550  model->probA = NULL;
3551  model->probB = NULL;
3552  }
3553 
3554  kkint32 total_sv = 0;
3555  kkint32* nz_count = new kkint32[nr_class];
3556 
3557  model->nSV = new kkint32[nr_class];
3558  for (i = 0; i < nr_class; i++)
3559  {
3560  kkint32 nSV = 0;
3561  for (kkint32 j = 0; j < count[i]; j++)
3562  {
3563  if (nonzero[start[i] + j])
3564  {
3565  ++nSV;
3566  ++total_sv;
3567  }
3568  }
3569  model->nSV[i] = nSV;
3570  nz_count[i] = nSV;
3571  }
3572 
3573  info("Total nSV = %d\n",total_sv);
3574 
3575  model->numSVs = total_sv;
3576  //model->SV = Malloc(svm_node *, total_sv);
3577  model->SV.DeleteContents ();
3578  model->SV.Owner (false);
3579  model->weOwnSupportVectors = false;
3580 
3581  p = 0;
3582  for (i = 0; i < l; i++)
3583  {
3584  if (nonzero[i])
3585  {
3586  //model->SV[p++] = x[i];
3587  model->SV.PushOnBack (x.IdxToPtr (i));
3588  p++;
3589  }
3590  }
3591 
3592  kkint32 *nz_start = new kkint32[nr_class];
3593  nz_start[0] = 0;
3594  for (i = 1; i < nr_class; i++)
3595  nz_start[i] = nz_start[i - 1] + nz_count[i - 1];
3596 
3597  model->sv_coef = new double*[nr_class - 1];
3598  for (i = 0; i < nr_class - 1; i++)
3599  model->sv_coef[i] = new double[total_sv];
3600 
3601  p = 0;
3602  for (i = 0; i < nr_class; i++)
3603  {
3604  for (kkint32 j = i + 1; j < nr_class; j++)
3605  {
3606  // classifier (i,j): coefficients with
3607  // i are in sv_coef[j-1][nz_start[i]...],
3608  // j are in sv_coef[i][nz_start[j]...]
3609 
3610  kkint32 si = start[i];
3611  kkint32 sj = start[j];
3612  kkint32 ci = count[i];
3613  kkint32 cj = count[j];
3614 
3615  kkint32 q = nz_start[i];
3616  kkint32 k;
3617 
3618  for (k = 0; k < ci; k++)
3619  {
3620  if (nonzero[si + k])
3621  model->sv_coef[j - 1][q++] = f[p].alpha[k];
3622  }
3623 
3624  q = nz_start[j];
3625  for (k = 0; k < cj; k++)
3626  {
3627  if (nonzero[sj + k])
3628  model->sv_coef[i][q++] = f[p].alpha[ci + k];
3629  }
3630  ++p;
3631  }
3632  }
3633 
3634  delete label; label = NULL;
3635  delete probA; probA = NULL;
3636  delete probB; probB = NULL;
3637  delete count; count = NULL;
3638  delete perm; perm = NULL;
3639  delete start; start = NULL;
3640  //free (x);
3641  delete weighted_C; weighted_C = NULL;
3642  delete nonzero; nonzero = NULL;
3643  for (i = 0; i < numBinaryCombos; i++)
3644  {
3645  delete f[i].alpha;
3646  f[i].alpha = NULL;
3647  }
3648  delete f; f = NULL;
3649  delete nz_count; nz_count = NULL;
3650  delete nz_start; nz_start = NULL;
3651  }
3652 
3653  return model;
3654 } /* svm_train */
FeatureVectorList x
Definition: svm2.h:65
void PushOnBack(FeatureVectorPtr image)
Overloading the PushOnBack function in KKQueue so we can monitor the Version and Sort Order...
double ** sv_coef
Definition: svm2.h:208
__int32 kkint32
Definition: KKBaseTypes.h:88
FileDescPtr FileDesc() const
Definition: svm2.cpp:170
EntryPtr IdxToPtr(kkuint32 idx) const
Definition: KKQueue.h:732
void svm_binary_svc_probability(const svm_problem *prob, const svm_parameter *param, double Cp, double Cn, double &probA, double &probB, RunLog &log)
Definition: svm2.cpp:3092
void DeleteContents()
Definition: KKQueue.h:321
bool Owner() const
Definition: KKQueue.h:305
kkint32 nr_class
Definition: svm2.h:205
double * probB
Definition: svm2.h:211
double svm_svr_probability(const svm_problem &prob, const svm_parameter &param, RunLog &log)
Definition: svm2.cpp:3226
kkint32 * nSV
Definition: svm2.h:217
Container class for FeatureVector derived objects.
static void info(const char *fmt,...)
Definition: svm2.cpp:606
decision_function svm_train_one(const svm_problem &prob, const svm_parameter &param, double Cp, double Cn, RunLog &_log)
Definition: svm2.cpp:2744
kkint32 * label
Definition: svm2.h:216
FeatureVectorList SV
Definition: svm2.h:207
kkint32 numSVs
Definition: svm2.h:206
kkint32 * weight_label
Definition: svm2.h:142
void svm_group_classes(const svm_problem *prob, kkint32 *nr_class_ret, kkint32 **label_ret, kkint32 **start_ret, kkint32 **count_ret, kkint32 *perm)
Definition: svm2.cpp:3271
const FeatureNumList & SelFeatures() const
Definition: svm2.h:61
kkint32 numTrainExamples
Definition: svm2.h:63
Represents a Feature Vector of a single example, labeled or unlabeled.
Definition: FeatureVector.h:59
bool weOwnSupportVectors
Definition: svm2.h:220
double * probA
Definition: svm2.h:210
double * rho
Definition: svm2.h:209
decision_function SVM289_MFS::svm_train_one ( const svm_problem prob,
const svm_parameter param,
double  Cp,
double  Cn,
RunLog _log 
)

Definition at line 2744 of file svm2.cpp.

References SVM289_MFS::decision_function::alpha, C_SVC, KKB::KKStr::Concat(), EPSILON_SVR, KKB::KKException::KKException(), NU_SVC, NU_SVR, SVM289_MFS::svm_problem::numTrainExamples, ONE_CLASS, SVM289_MFS::Solver::SolutionInfo::rho, SVM289_MFS::decision_function::rho, solve_c_svc(), solve_epsilon_svr(), solve_nu_svc(), solve_nu_svr(), solve_one_class(), SVM289_MFS::svm_parameter::svm_type, and SVM289_MFS::svm_problem::y.

Referenced by svm_train().

2750 {
2751  double* alpha = new double [prob.numTrainExamples];
2753 
2754  switch (param.svm_type)
2755  {
2756  case SVM_Type::C_SVC: solve_c_svc (&prob, &param, alpha, &si, Cp, Cn, _log); break;
2757  case SVM_Type::NU_SVC: solve_nu_svc (&prob, &param, alpha, &si, _log); break;
2758  case SVM_Type::ONE_CLASS: solve_one_class (&prob, &param, alpha, &si, _log); break;
2759  case SVM_Type::EPSILON_SVR: solve_epsilon_svr (&prob, &param, alpha, &si, _log); break;
2760  case SVM_Type::NU_SVR: solve_nu_svr (&prob, &param, alpha, &si, _log); break;
2761 
2762  default:
2763  {
2764  KKStr errMsg = "SVM289_MFS::svm_train_one ***ERROR*** Invalid Solver Defined.";
2765  errMsg << " Solver[" << (int)param.svm_type << "]";
2766  _log.Level (-1) << endl << endl << errMsg << endl << endl;
2767  throw KKException (errMsg);
2768  }
2769  }
2770 
2771  //info ("obj = %f, rho = %f\n", si.obj, si.rho);
2772 
2773  // output SVs
2774 
2775  std::vector<kkint32> SVIndex; // Normalize by Margin Width(NMW).
2776 
2777  kkint32 nSV = 0;
2778  kkint32 nBSV = 0;
2779  for (kkint32 i = 0; i < prob.numTrainExamples; i++)
2780  {
2781  if (fabs (alpha[i]) > 0)
2782  {
2783  ++nSV;
2784  SVIndex.push_back (i); // NMW
2785  if (prob.y[i] > 0)
2786  {
2787  if (fabs (alpha[i]) >= si.upper_bound_p)
2788  {
2789  ++nBSV;
2790  // BSVIndex.insert (prob->index[i]); // NMW
2791  }
2792  }
2793  else
2794  {
2795  if (fabs (alpha[i]) >= si.upper_bound_n)
2796  {
2797  ++nBSV;
2798  // BSVIndex.insert (prob->index[i]);
2799  }
2800  }
2801  }
2802  }
2803 
2804 
2805  //**********************************************************************************
2806  // Code to normalize by margin width.
2807 
2808 
2809  double sum=0.0;
2810  std::vector<kkint32>::iterator it,it2;
2811  double kvalue = 0.0;
2812 
2813  for (it = SVIndex.begin(); it < SVIndex.end(); it++)
2814  {
2815  for (it2 = SVIndex.begin(); it2 < SVIndex.end(); it2++)
2816  {
2817  kkint32 k = *it;
2818  kkint32 kk = *it2;
2819 
2820  kvalue = Kernel::k_function (prob.x[k], prob.x[kk], param, prob.SelFeatures ());
2821 
2822  sum += prob.y[k] * prob.y[kk] * alpha[k] * alpha[kk] * kvalue;
2823  }
2824  }
2825 
2826  sum /= SVIndex.size();
2827  sum = sqrt(sum);
2828 
2829  for (it = SVIndex.begin(); it < SVIndex.end(); it++)
2830  alpha[*it] /= sum;
2831 
2832  si.rho /= sum;
2833 
2834  //info ("nSV = %d, nBSV = %d\n", nSV, nBSV);
2835 
2837  f.alpha = alpha;
2838  f.rho = si.rho;
2839  return f;
2840 } /* svm_train_one */
FeatureVectorList x
Definition: svm2.h:65
HTMLReport &__cdecl endl(HTMLReport &htmlReport)
Definition: HTMLReport.cpp:240
static void solve_nu_svr(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, RunLog &_log)
Definition: svm2.cpp:2672
__int32 kkint32
Definition: KKBaseTypes.h:88
void solve_c_svc(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, double Cp, double Cn, RunLog &_log)
Definition: svm2.cpp:2414
void solve_nu_svc(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, RunLog &_log)
Definition: svm2.cpp:2476
RunLog & Level(kkint32 _level)
Definition: RunLog.cpp:220
void solve_epsilon_svr(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, RunLog &_log)
Definition: svm2.cpp:2613
const FeatureNumList & SelFeatures() const
Definition: svm2.h:61
void solve_one_class(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, RunLog &_log)
Definition: svm2.cpp:2558
kkint32 numTrainExamples
Definition: svm2.h:63
SVM_Type SVM289_MFS::SVM_Type_FromStr ( KKStr  s)

Definition at line 534 of file svm2.cpp.

References C_SVC, EPSILON_SVR, KKB::KKStr::EqualIgnoreCase(), NU_SVC, NU_SVR, ONE_CLASS, KKB::KKStr::operator==(), SVM_NULL, and KKB::KKStr::Upper().

Referenced by SVM289_MFS::svm_parameter::ParseTabDelStr(), SVM289_MFS::svm_parameter::ProcessSvmParameter(), and SVM289_MFS::Svm_Model::ReadXML().

535 {
536  s.Upper ();
537 
538  if ((s.EqualIgnoreCase ("C_SVC")) || (s == "0")) return SVM_Type::C_SVC;
539  if ((s.EqualIgnoreCase ("NU_SVC")) || (s == "1")) return SVM_Type::NU_SVC;
540  if ((s.EqualIgnoreCase ("ONE_CLASS")) || (s == "2")) return SVM_Type::ONE_CLASS;
541  if ((s.EqualIgnoreCase ("EPSILON_SVR")) || (s == "3")) return SVM_Type::EPSILON_SVR;
542  if ((s.EqualIgnoreCase ("NU_SVR")) || (s == "4")) return SVM_Type::NU_SVR;
543 
544  return SVM_Type::SVM_NULL;
545 }
bool EqualIgnoreCase(const KKStr &s2) const
Definition: KKStr.cpp:1250
void Upper()
Converts all characters in string to their Upper case equivalents via 'toupper'.
Definition: KKStr.cpp:2461
KKStr SVM289_MFS::SVM_Type_ToStr ( SVM_Type  svmType)

Definition at line 549 of file svm2.cpp.

References C_SVC, EPSILON_SVR, NU_SVC, NU_SVR, and ONE_CLASS.

Referenced by SVM289_MFS::svm_parameter::ToTabDelStr(), and SVM289_MFS::Svm_Model::WriteXML().

550 {
551  switch (svmType)
552  {
553  case SVM_Type::C_SVC: return "c_svc";
554  case SVM_Type::NU_SVC: return "nu_svc";
555  case SVM_Type::ONE_CLASS: return "one_class";
556  case SVM_Type::EPSILON_SVR: return "epsilon_svr";
557  case SVM_Type::NU_SVR: return "nu_svr";
558  }
559  return "NULL";
560 }
template<class T >
void SVM289_MFS::swap ( T &  x,
T &  y 
)
inline

Definition at line 283 of file svm2.h.

283 { T t=x; x=y; y=t; }

Variable Documentation

kkint32 SVM289_MFS::libsvm_version
void(* SVM289_MFS::svm_print_string)(const char *) = &print_string_stdout

Definition at line 604 of file svm2.cpp.