28 using namespace KKMLL;
30 #pragma warning(disable : 4996
) 72 kkint32 memoryConsumedEstimated =
sizeof (SvmModel233)
73 + param.MemoryConsumedEstimated ()
74 + (kkint32)exampleNames.size () * 40;
76 if (
SV) memoryConsumedEstimated +=
sizeof (
svm_node*) *
l;
78 if (
rho) memoryConsumedEstimated +=
l *
sizeof (
double);
82 if (
kValueTable) memoryConsumedEstimated +=
sizeof (
double) *
l;
84 memoryConsumedEstimated +=
sizeof (
svm_node) *
l;
86 return memoryConsumedEstimated;
109 free (SV);
SV = NULL;
110 free (sv_coef);
sv_coef = NULL;
111 free (rho);
rho = NULL;
112 free (label);
label = NULL;
113 free (nSV);
nSV = NULL;
122 free (SVIndex);
SVIndex = NULL;
136 if (svIDX < (kkint32)exampleNames.size ())
137 return exampleNames[svIDX];
139 return "SV" + StrFormatInt (svIDX,
"ZZZ#");
156 kkint32 origPrecision = (kkint32)o.precision ();
161 XmlElementInt32::WriteXML (
nr_class,
"nr_class", o);
165 XmlElementInt32::WriteXML (totalNumSVs,
"totalNumSVs", o);
167 XmlElementArrayDouble::WriteXML (numberOfBinaryClassifiers,
rho,
"rho", o);
171 kkint32 oldPrecision = (kkint32)o.precision ();
173 XmlElementArrayDouble::WriteXML (numberOfBinaryClassifiers,
margin,
"margin", o);
174 o.precision (oldPrecision);
178 XmlElementArrayInt32::WriteXML (
nr_class,
label,
"label", o);
181 XmlElementArrayInt32::WriteXML (
nr_class,
nSV,
"nSV", o);
185 kkint32 totalNumOfElements = 0;
186 for (
kkint32 i = 0; i < totalNumSVs; i++)
191 totalNumOfElements++;
195 totalNumOfElements++;
198 XmlElementInt32::WriteXML (totalNumOfElements,
"totalNumOfElements", o);
202 for (
kkint32 i = 0; i < totalNumSVs; i++)
204 if ((kkint32)exampleNames.size () > i)
205 o <<
"SuportVectorNamed" <<
"\t" << exampleNames[i];
210 for (kkint32 j = 0; j < nr_class - 1; j++)
211 o <<
"\t" << sv_coef[j][i];
216 o <<
"\t" << p->index <<
":" << p->value;
223 o.precision (origPrecision);
240 exampleNames.clear ();
241 kkint32 numberOfBinaryClassifiers = 0;
244 kkint32 totalNumOfElements = 0;
250 delete SV;
SV = NULL;
252 bool errorsFound =
false;
256 while ((!errorsFound) && (!cancelFlag))
263 delete cp; cp = NULL; t = NULL;
287 param.ParseTabDelStr (*(
dynamic_cast<XmlElementKKStrPtr> (e)->Value ()));
302 else if (varName
.EqualIgnoreCase ("rho") && (
typeid (*e) ==
typeid (XmlElementArrayDouble)))
305 XmlElementArrayDoublePtr rhoArray =
dynamic_cast<XmlElementArrayDoublePtr> (e);
306 if (rhoArray->Count () != numberOfBinaryClassifiers)
308 log.Level (-1) << endl
309 <<
"SvmModel233::ReadXML ***ERROR*** 'rho' array incorrect length: expected: " 310 << numberOfBinaryClassifiers <<
" found: " << rhoArray->Count () <<
" elements" 316 rho = rhoArray->TakeOwnership ();
320 else if (varName
.EqualIgnoreCase ("margin") && (
typeid (*e) ==
typeid (XmlElementArrayDouble)))
323 XmlElementArrayDoublePtr marginArray =
dynamic_cast<XmlElementArrayDoublePtr> (e);
324 if (marginArray->Count () != numberOfBinaryClassifiers)
326 log.Level (-1) << endl
327 <<
"SvmModel233::ReadXML ***ERROR*** 'margin' array incorrect length: expected: " 328 << numberOfBinaryClassifiers <<
" found: " << marginArray->Count () <<
" elements" 334 margin = marginArray->TakeOwnership ();
338 else if (varName
.EqualIgnoreCase ("label") && (
typeid (*e) ==
typeid (XmlElementArrayInt32)))
341 XmlElementArrayInt32Ptr labelArray =
dynamic_cast<XmlElementArrayInt32Ptr> (e);
342 if (labelArray->Count () !=
nr_class)
344 log.Level (-1) << endl
345 <<
"SvmModel233::ReadXML ***ERROR*** 'label' array incorrect length: expected: " 346 << nr_class <<
" found: " << labelArray->Count () <<
" elements" 352 label = labelArray->TakeOwnership ();
356 else if (varName
.EqualIgnoreCase ("nSV") && (
typeid (*e) ==
typeid (XmlElementArrayInt32)))
359 XmlElementArrayInt32Ptr nSVArray =
dynamic_cast<XmlElementArrayInt32Ptr> (e);
362 log.Level (-1) << endl
363 <<
"SvmModel233::ReadXML ***ERROR*** 'nSV' array incorrect length: expected: " 364 << nr_class <<
" found: " << nSVArray->Count () <<
" elements" 370 nSV = nSVArray->TakeOwnership ();
377 if (totalNumOfElements < 1)
379 log.Level (-1) << endl
380 <<
"SvmModel233::ReadXML ***ERROR*** Invalid totalNumOfElements: " << totalNumOfElements << endl
391 for (
kkint32 i = 0; i < m; i++)
409 log.Level (-1) << endl
410 <<
"SvmModel233::ReadXML ***ERROR*** Invalid Content: " << lineName << endl
415 else if (numSVsLoaded >= totalNumSVs)
417 log.Level (-1) <<
"SvmModel233::ReadXML ***ERROR*** Exceeding expected number of Support Vectors." << endl;
422 log.Level (-1) << endl <<
"SvmModel233::ReadXML ***ERROR*** 'SV' was no defined LineNum:" << lineName << endl << endl;
430 exampleNames.push_back (p.GetNextToken (
"\t"));
436 SV[numSVsLoaded] = &(
xSpace[numElementsLoaded]);
438 while ((p
.MoreTokens ()) && (numElementsLoaded < (totalNumOfElements - 1)))
445 if (numElementsLoaded >= totalNumOfElements)
447 log.Level (-1) << endl
448 <<
"SvmModel233::ReadXML ***ERROR*** 'numElementsLoaded' is greater than what was defined by 'totalNumOfElements'." << endl
463 log.Level (-1) << endl
464 <<
"SvmModel233::ReadXML ***ERROR*** Unexpected Token[" << t->SectionName () <<
" " << t->VarName () <<
"]" << endl
475 if (numSVsLoaded != totalNumSVs)
477 log.Level (-1) << endl
478 <<
"SvmModel233::ReadXML ***ERROR*** numSVsLoaded[" << numSVsLoaded <<
"] does not match totalNumSVs[" << totalNumSVs <<
"]." << endl
490 valid = (!errorsFound) && (!cancelFlag);
521 typedef float Qfloat;
594 else if(
alpha[i] <= 0)
/// Returns the lesser of the two values; when the arguments compare
/// equal the second argument is returned (matches the original ternary).
template <class T>
inline T min (T x, T y)
{
  if (x < y)
    return x;
  return y;
}
/// Returns the greater of the two values; when the arguments compare
/// equal the second argument is returned (matches the original ternary).
template <class T>
inline T max (T x, T y)
{
  if (x > y)
    return x;
  return y;
}
/// Exchanges the contents of the two references using a temporary copy;
/// requires T to be copy-constructible and copy-assignable.
template <class T>
inline void Swap (T& x, T& y)
{
  T temp = x;
  x = y;
  y = temp;
}
690 template <
class S,
class T>
inline void clone(T*& dst, S* src,
kkint32 n)
#define Malloc(type,n) (type *)malloc((n)*sizeof(type))

/// No-op sink for libsvm's printf-style diagnostic messages; accepts the
/// format string and variadic arguments but deliberately discards them,
/// silencing the library's console output.
void info (const char* fmt, ...)
{
}
890 memoryConsumedEstimated += ((
sizeof (
double) +
sizeof(
kkint32)) *
nr_weight);
893 memoryConsumedEstimated += (
sizeof (
double) *
dim);
895 return memoryConsumedEstimated;
913 else if (cmd
== "-C")
916 else if (cmd
== "-D")
919 else if (cmd
== "-E")
922 else if ((cmd
== "-G") || (cmd
== "-GAMMA"))
925 else if (cmd
== "-H")
928 else if (cmd
== "-I")
931 else if (cmd
== "-J")
934 else if (cmd
== "-K")
937 else if (cmd
== "-L")
940 else if (cmd
== "-M")
943 else if (cmd
== "-N")
946 else if (cmd
== "-O")
949 else if (cmd
== "-P")
952 else if (cmd
== "-Q")
955 else if (cmd
== "-R")
958 else if (cmd
== "-S")
961 else if (cmd
== "-T")
964 else if (cmd
== "-U")
967 else if (cmd
== "-W")
970 weight_label = KKB::kkReallocateArray (weight_label, nr_weight - 1, nr_weight);
971 weight = KKB::kkReallocateArray (weight, nr_weight - 1, nr_weight);
1027 if (field
[0
] !=
'-')
1029 leftOverStr <<
" " << field;
1035 if (_paramStr
.Len () > 0)
1037 if (_paramStr
[0
] !=
'-')
1041 valueNum = atof (value.Str ());
1043 bool parmUsed =
false;
1047 leftOverStr <<
" " << field <<
" " << value;
1054 leftOverStr <<
" " << field <<
" " << value;
1062 _paramStr
= leftOverStr;
1073 cmdStr <<
"-a " <<
numSVM <<
" " 1075 <<
"-c " <<
C <<
" " 1076 <<
"-d " <<
degree <<
" " 1077 <<
"-e " <<
eps <<
" " 1078 <<
"-g " <<
gamma <<
" " 1081 <<
"-j " <<
hist <<
" " 1083 <<
"-l " <<
cBoost <<
" " 1085 <<
"-n " <<
nu <<
" ";
1090 cmdStr <<
"-p " <<
p <<
" " 1092 <<
"-r " <<
coef0 <<
" " 1095 <<
"-u " <<
A <<
" ";
1110 result <<
"svm_type" <<
"\t" <<
svm_type <<
"\t" 1112 <<
"degree" <<
"\t" <<
degree <<
"\t" 1113 <<
"gamma" <<
"\t" <<
gamma <<
"\t" 1114 <<
"coef0" <<
"\t" <<
coef0 <<
"\t" 1115 <<
"cache_size" <<
"\t" <<
cache_size <<
"\t" 1116 <<
"eps" <<
"\t" <<
eps <<
"\t" 1117 <<
"C" <<
"\t" <<
C <<
"\t";
1119 result <<
"nr_weight" <<
"\t" <<
nr_weight <<
"\t";
1124 if (x > 0) result <<
",";
1131 if (x > 0) result <<
",";
1137 result <<
"nu" <<
"\t" <<
nu <<
"\t" 1138 <<
"p" <<
"\t" <<
p <<
"\t" 1139 <<
"shrinking" <<
"\t" <<
shrinking <<
"\t" 1141 <<
"numSVM" <<
"\t" <<
numSVM <<
"\t" 1142 <<
"sampleSV" <<
"\t" <<
sampleSV <<
"\t" 1143 <<
"hist" <<
"\t" <<
hist <<
"\t" 1144 <<
"boosting" <<
"\t" <<
boosting <<
"\t" 1145 <<
"cBoost" <<
"\t" <<
cBoost <<
"\t" 1146 <<
"dimSelect" <<
"\t" <<
dimSelect <<
"\t" 1147 <<
"dim" <<
"\t" <<
dim <<
"\t";
1151 result <<
"featureWeight" <<
"\t";
1161 result <<
"confidence" <<
"\t" <<
confidence <<
"\t" 1162 <<
"A" <<
"\t" <<
A <<
"\t" 1163 <<
"nr_class" <<
"\t" <<
nr_class <<
"\t" 1164 <<
"threshold" <<
"\t" <<
threshold <<
"\t" 1165 <<
"sample" <<
"\t" <<
sample <<
"\t";
1210 if (field
== "svm_type")
1213 else if (field
== "kernel_type")
1216 else if (field
== "degree")
1219 else if (field
== "gamma")
1222 else if (field
== "coef0")
1225 else if (field
== "cache_size")
1228 else if (field
== "eps")
1231 else if (field
== "C")
1234 else if (field
== "nr_weight")
1258 else if (field
== "nu")
1261 else if (field
== "p")
1264 else if (field
== "shrinking")
1267 else if (field
== "probability")
1270 else if (field
== "numSVM")
1273 else if (field
== "sampleSV")
1276 else if (field
== "hist")
1279 else if (field
== "boosting")
1282 else if (field
== "cBoost")
1285 else if (field
== "dimSelect")
1288 else if (field
== "dim")
1291 else if (field
== "featureWeight")
1295 for (x = 0; x <
dim; x++)
1301 else if (field
== "confidence" )
1304 else if (field
== "A")
1307 else if (field
== "nr_class")
1310 else if (field
== "threshold")
1313 else if (field
== "sample")
1339 for (
kkint32 zed = 0; zed <
l; zed++)
1387 head_t *prev, *next;
1394 void lru_delete (head_t *h);
1395 void lru_insert (head_t *h);
1408 head = (head_t *)calloc (l,
sizeof (head_t));
1409 size /=
sizeof (Qfloat);
1410 size -= l *
sizeof (head_t) /
sizeof (Qfloat);
1411 lru_head.next = lru_head.prev = &lru_head;
1418 for (head_t *h = lru_head.next; h != &lru_head; h = h->next)
1428 h->prev->next = h->next;
1429 h->next->prev = h->prev;
1437 h->next = &lru_head;
1438 h->prev = lru_head.prev;
1450 head_t *h = &head[index];
1462 head_t *old = lru_head.next;
1471 h->data = (Qfloat *)realloc(h->data,
sizeof(Qfloat)*len);
1489 if(head[i].len) lru_delete(&head[i]);
1490 if(head[j].len) lru_delete(&head[j]);
1491 Swap(head[i].data,head[j].data);
1492 Swap(head[i].len,head[j].len);
1493 if(head[i].len) lru_insert(&head[i]);
1494 if(head[j].len) lru_insert(&head[j]);
1497 for(head_t *h = lru_head.next; h!=&lru_head; h=h->next)
1502 Swap(h->data[i],h->data[j]);
1525 class SVM233::Kernel {
1542 const double* featureWeight
1550 if(x_square) Swap(x_square[i],x_square[j]);
1563 const double degree;
1568 double *featureWeight;
1571 static double dotSubspace(
const svm_node *px,
const svm_node *py,
const double *featureWeight);
1575 return dot(x[i],x[j]);
1579 return dotSubspace(x[i],x[j], featureWeight);
1583 return pow(gamma*dot(x[i],x[j])+coef0,degree);
1587 return pow(gamma*dotSubspace(x[i],x[j], featureWeight)+coef0,degree);
1591 return exp(-gamma*(x_square[i]+x_square[j]-2*dot(x[i],x[j])));
1595 return exp(-gamma*(x_square[i]+x_square[j]-2*dotSubspace(x[i],x[j],featureWeight)));
1599 return tanh(gamma*dot(x[i],x[j])+coef0);
1603 return tanh(gamma*dotSubspace(x[i],x[j],featureWeight)+coef0);
1624 featureWeight =
new double[dim];
1625 std::copy (param.featureWeight, param.featureWeight + dim, featureWeight);
1628 switch (kernel_type)
1661 if (kernel_type ==
RBF)
1663 x_square =
new double[l];
1667 x_square[i] = dotSubspace(x[i],x[i],featureWeight);
1669 x_square[i] = dot(x[i],x[i]);
1679 delete[] featureWeight;
1794 return exp (-param
.gamma * sum);
1808 const double* featureWeight
1814 return dotSubspace(x,y,featureWeight);
1826 sum+=d*d*featureWeight[x
->index-1];
1836 sum+=d*d*featureWeight[y
->index-1];
1842 sum+=d*d*featureWeight[x
->index-1];
1851 sum+=d*d*featureWeight[x
->index-1];
1861 sum+=d*d*featureWeight[y
->index-1];
1867 return exp (-param
.gamma * sum);
1870 return tanh(param
.gamma*dotSubspace(x,y, featureWeight)+param
.coef0);
1886 Swap(alpha_status[i],alpha_status[j]);
1887 Swap(alpha[i],alpha[j]);
1889 Swap(active_set[i],active_set[j]);
1890 Swap(G_bar[i],G_bar[j]);
1912 double alpha_i =
alpha[i];
1914 G[j] += alpha_i * Q_i[j];
1935 clone (alpha,alpha_,l);
1959 G_bar =
new double[l];
1970 double alpha_i =
alpha[i];
1973 G[j] += alpha_i*Q_i[j];
1983 kkint32 counter = min ((kkint32)l, (kkint32)1000) + 1;
1991 counter = min(l,1000);
2020 double old_alpha_i =
alpha[i];
2021 double old_alpha_j =
alpha[j];
2025 double delta = (-G[i]-G[j])/max(Q_i[i]+Q_j[j]+2*Q_i[j],(Qfloat)0);
2046 if(diff > C_i - C_j)
2051 alpha[j] = C_i - diff;
2059 alpha[i] = C_j + diff;
2065 double delta = (G[i]-G[j])/max(Q_i[i]+Q_j[j]-2*Q_i[j],(Qfloat)0);
2074 alpha[j] = sum - C_i;
2090 alpha[i] = sum - C_j;
2105 double delta_alpha_i =
alpha[i] - old_alpha_i;
2106 double delta_alpha_j =
alpha[j] - old_alpha_j;
2110 G[k] += Q_i[k]*delta_alpha_i + Q_j[k]*delta_alpha_j;
2126 G_bar[k] -= C_i * Q_i[k];
2129 G_bar[k] += C_i * Q_i[k];
2137 G_bar[k] -= C_j * Q_j[k];
2140 G_bar[k] += C_j * Q_j[k];
2154 v +=
alpha[i] * (
G[i] +
b[i]);
2173 info("\noptimization finished, #iter = %d\n",iter
);
2198 double* C_ =
new double[l];
2200 C_[i] = (y_[i] > 0 ? Cp : Cn);
2218 double Gmax1 = -
INF;
2221 double Gmax2 = -
INF;
2266 if(Gmax1+Gmax2 <
eps)
2282 double Gm1 = -
y[j]*
G[j];
2283 double Gm2 =
y[i]*
G[i];
2293 if(-
G[k] >= Gm1)
continue;
2295 else if (-
G[k] >= Gm2)
continue;
2301 if(
G[k] >= Gm2)
continue;
2303 else if(
G[k] >= Gm1)
continue;
2325 if(-
G[k] < Gm1)
continue;
2327 else if(-
G[k] < Gm2)
continue;
2333 if(
G[k] < Gm2)
continue;
2335 else if(
G[k] < Gm1)
continue;
2352 double ub =
INF, lb = -
INF, sum_free = 0;
2355 double yG =
y[i]*
G[i];
2379 r = sum_free/nr_free;
2417 double calculate_rho();
2418 void do_shrinking();
2430 double Gmax1 = -
INF;
2433 double Gmax2 = -
INF;
2436 double Gmax3 = -
INF;
2439 double Gmax4 = -
INF;
2484 if(max(Gmax1+Gmax2,Gmax3+Gmax4) < eps)
2487 if(Gmax1+Gmax2 > Gmax3+Gmax4)
2505 double Gmax1 = -
INF;
2506 double Gmax2 = -
INF;
2507 double Gmax3 = -
INF;
2508 double Gmax4 = -
INF;
2517 if(-
G[k] > Gmax1) Gmax1 = -
G[k];
2519 else if(-
G[k] > Gmax3) Gmax3 = -
G[k];
2525 if(
G[k] > Gmax2) Gmax2 =
G[k];
2527 else if(
G[k] > Gmax4) Gmax4 =
G[k];
2531 double Gm1 = -Gmax2;
2532 double Gm2 = -Gmax1;
2533 double Gm3 = -Gmax4;
2534 double Gm4 = -Gmax3;
2542 if(-
G[k] >= Gm1)
continue;
2544 else if(-
G[k] >= Gm3)
continue;
2550 if(
G[k] >= Gm2)
continue;
2552 else if(
G[k] >= Gm4)
continue;
2563 if(unshrinked || max(-(Gm1+Gm2),-(Gm3+Gm4)) > eps*10)
return;
2574 if(-
G[k] < Gm1)
continue;
2576 else if(-
G[k] < Gm3)
continue;
2582 if(
G[k] < Gm2)
continue;
2584 else if(
G[k] < Gm4)
continue;
2599 kkint32 nr_free1 = 0,nr_free2 = 0;
2600 double ub1 =
INF, ub2 =
INF;
2601 double lb1 = -
INF, lb2 = -
INF;
2602 double sum_free1 = 0, sum_free2 = 0;
2609 ub1 = min(ub1,G[i]);
2611 lb1 = max(lb1,G[i]);
2621 ub2 = min(ub2,G[i]);
2623 lb2 = max(lb2,G[i]);
2634 r1 = sum_free1/nr_free1;
2639 r2 = sum_free2/nr_free2;
2669 for (
kkint32 j = start; j < len; j++)
2714 for(
kkint32 j=start;j<len;j++)
2748 sign =
new schar[2*l];
2757 buffer[0] =
new Qfloat[2*l];
2758 buffer[1] =
new Qfloat[2*l];
2764 Swap(sign[i],sign[j]);
2765 Swap(index[i],index[j]);
2780 Qfloat *buf = buffer[next_buffer];
2781 next_buffer = 1 - next_buffer;
2784 buf[j] = si * sign[j] * data[index[j]];
2820 double *minus_ones =
new double[l];
2850 sum_alpha += alpha[i];
2857 delete[] minus_ones;
2872 double *minus_ones =
new double[l];
2877 for (i = 0; i < l; i++)
2900 for (i = 0; i < l; i++)
2901 sum_alpha += alpha[i];
2905 for (i = 0; i < l; i++)
2908 delete[] minus_ones;
2922 double nu = param
->nu;
2932 double sum_pos = nu*l/2;
2933 double sum_neg = nu*l/2;
2938 alpha[i] = min(1.0,sum_pos);
2939 sum_pos -= alpha[i];
2943 alpha[i] = min(1.0,sum_neg);
2944 sum_neg -= alpha[i];
2947 double *zeros =
new double[l];
2980 double *zeros =
new double[l];
2988 alpha[n] = param
->nu * prob
->l - n;
3015 double *alpha2 =
new double[2*l];
3016 double *linear_term =
new double[2*l];
3023 linear_term[i] = param
->p - prob
->y[i];
3027 linear_term[i+l] = param
->p + prob
->y[i];
3044 double sum_alpha = 0;
3047 alpha[i] = alpha2[i] - alpha2[i+l];
3048 sum_alpha += fabs(alpha[i]);
3053 delete[] linear_term;
3066 double C = param
->C;
3067 double *alpha2 =
new double[2*l];
3068 double *linear_term =
new double[2*l];
3072 double sum = C * param
->nu * l / 2;
3075 alpha2[i] = alpha2[i+l] = min(sum,C);
3078 linear_term[i] = - prob
->y[i];
3081 linear_term[i+l] = prob
->y[i];
3091 for (i = 0; i < l; i++)
3093 alpha[i] = alpha2[i] - alpha2[i + l];
3097 delete[] linear_term;
3111 double *alpha =
Malloc(
double,prob->l);
3128 if (fabs (alpha[i]) > 0)
3133 if (fabs (alpha[i]) >= prob->W[i])
3138 if (fabs(alpha[i]) >= prob->W[i])
3144 info("nSV = %d, nBSV = %d\n",nSV
,nBSV
);
3158 std::set<kkint32>& BSVIndex
3161 double *alpha =
Malloc (
double, prob->l);
3190 std::vector<kkint32> SVIndex;
3197 if (fabs(alpha[i]) > 0)
3200 SVIndex.push_back (i);
3203 if (fabs (alpha[i]) >= si.upper_bound_p)
3206 BSVIndex.insert (prob->index[i]);
3211 if (fabs(alpha[i]) >= si.upper_bound_n)
3214 BSVIndex.insert (prob->index[i]);
3220 info("nSV = %d, nBSV = %d\n",nSV
,nBSV
);
3223 std::vector<kkint32>::iterator it,it2;
3225 for (it = SVIndex.begin(); it < SVIndex.end(); it++)
3227 for (it2 = SVIndex.begin(); it2 < SVIndex.end(); it2++)
3247 sum+= prob
->y[k]*prob
->y[kk]*alpha[k]*alpha[kk]*kvalue;
3251 sum /= SVIndex.size();
3254 for (it = SVIndex.begin(); it < SVIndex.end(); it++)
3291 std::copy (param->featureWeight,
3292 param->featureWeight + model->dim,
3293 model->featureWeight
3306 model->sv_coef =
Malloc(
double *,1);
3308 model->rho =
Malloc(
double, 1);
3314 for (i = 0; i < prob->l; i++)
3315 if (fabs (f.alpha[i]) > 0) ++nSV;
3318 model->SV =
Malloc(svm_node *,nSV);
3322 model->nonSVIndex =
Malloc(kkint32, model->numNonSV);
3323 model->SVIndex=
Malloc(kkint32, nSV);
3326 model->sv_coef[0] =
Malloc (
double, nSV);
3329 for (i = 0; i < prob
->l; i++)
3331 if (fabs (f.alpha[i]) > 0)
3333 model->SVIndex[j] = prob->index[i];
3334 model->SV[j] = prob->x[i];
3335 model->sv_coef[0][j] = f.alpha[i];
3340 model->nonSVIndex[jj++]=prob->index[i];
3357 bool weHaveExampleNames = ((kkint32)prob->exampleNames.size () >= prob->l);
3360 for (i = 0; i < l; i++)
3364 for (j = 0; j < nr_class; j++)
3366 if (this_label == label[j])
3376 if (nr_class == max_nr_class)
3379 label = (kkint32 *)realloc(label,max_nr_class*
sizeof(kkint32));
3380 count = (kkint32 *)realloc(count,max_nr_class*
sizeof(kkint32));
3382 label[nr_class] = this_label;
3383 count[nr_class] = 1;
3392 for (i = 1; i < nr_class; i++)
3393 start[i] = start[i-1] + count[i-1];
3398 if (prob->W != NULL)
3400 std::vector<kkint32> reindex(l);
3402 for (i = 0; i < l; i++)
3404 x[start[index[i]]] = prob
->x[i];
3405 reindex[start[index[i]]] = prob->index[i];
3407 W[start[index[i]]] = prob
->W[i];
3412 for (i = 1; i < nr_class; i++)
3413 start[i] = start[i - 1] + count[i - 1];
3417 double *weighted_C =
Malloc(
double, nr_class);
3418 for (i = 0; i < nr_class; i++)
3419 weighted_C[i] = param
->C;
3425 for (j = 0; j < nr_class; j++)
3430 fprintf (stderr,
"warning: class label %d specified in weight is not found\n", param->weight_label[i]);
3437 bool *nonzero =
Malloc(
bool,l);
3444 for(i=0;i<nr_class;i++)
3446 for(
kkint32 j=i+1;j<nr_class;j++)
3451 kkint32 si = start[i], sj = start[j];
3452 kkint32 ci = count[i], cj = count[j];
3454 sub_prob.x =
Malloc(svm_node *,sub_prob.l);
3455 sub_prob.y =
Malloc(
double, sub_prob.l);
3457 sub_prob.W =
Malloc(
double, sub_prob.l);
3458 sub_prob.index =
Malloc(kkint32, sub_prob.l);
3463 sub_prob
.x[k] = x[si+k];
3466 sub_prob
.W[k] = W[si+k];
3467 sub_prob.index[k]=reindex[si+k];
3471 sub_prob
.x[ci+k] = x[sj+k];
3472 sub_prob
.y[ci+k] = -1;
3474 sub_prob
.W[ci+k] = W[sj+k];
3475 sub_prob.index[ci+k]=reindex[sj+k];
3481 f[p] = svm_train_one(&sub_prob,param,weighted_C[i],weighted_C[j],model->BSVIndex);
3493 for (k = 0; k < ci; k++)
3495 if(!nonzero[si+k] && fabs(f[p].alpha[k]) > 0)
3496 nonzero[si+k] =
true;
3499 for (k = 0; k < cj; k++)
3501 if (!nonzero[sj+k] && fabs(f[p].alpha[ci+k]) > 0)
3502 nonzero[sj+k] =
true;
3507 if (sub_prob.W != NULL)
3509 free(sub_prob.index);
3518 model->label =
Malloc (kkint32, nr_class);
3519 for (i = 0; i < nr_class; i++)
3522 model->rho =
Malloc (
double, nr_class*(nr_class-1)/2);
3523 for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
3528 model->nSV =
Malloc (kkint32,nr_class);
3529 for (i=0; i < nr_class; i++)
3532 for (
kkint32 j = 0; j < count[i]; j++)
3534 if (nonzero[start[i]+j])
3539 model
->nSV[i] = nSV;
3544 info("Total nSV = %d\n",total_sv
);
3546 model
->l = total_sv;
3547 model->SV =
Malloc (svm_node *,total_sv);
3548 model->exampleNames.clear ();
3550 if (weHaveExampleNames)
3552 for (kkint32 zed = 0; zed < total_sv; zed++)
3553 model->exampleNames.push_back (
"");
3560 model->SVIndex =
Malloc(kkint32, total_sv);
3561 model->nonSVIndex =
Malloc(kkint32, model->numNonSV);
3568 for (i = 0; i < l; i++)
3572 model
->SV[p] = x[i];
3574 if (weHaveExampleNames)
3575 model->exampleNames[p] = prob->exampleNames[i];
3588 for(i=1;i<nr_class;i++)
3589 nz_start[i] = nz_start[i-1]+nz_count[i-1];
3591 model->sv_coef =
Malloc(
double *,nr_class-1);
3592 for(i=0;i<nr_class-1;i++)
3594 model->sv_coef[i] =
Malloc(
double,total_sv);
3595 for (
kkint32 zed = 0; zed < total_sv; zed++)
3600 for (i = 0; i < nr_class; i++)
3601 for (
kkint32 j = i + 1; j < nr_class; j++)
3614 for (k = 0; k < ci; k++)
3615 if (nonzero[si + k])
3619 cerr << endl << endl
3620 <<
"svm_train ***ERROR*** Alpha Valude Being Overwritten." << endl
3632 cerr << endl << endl
3633 <<
"svm_train ***ERROR*** Alpha Valude Being Overwritten." << endl
3651 for(i=0;i<nr_class*(nr_class-1)/2;i++)
3689 "c_svc",
"nu_svc",
"one_class",
"epsilon_svr",
"nu_svr",NULL
3696 "linear",
"polynomial",
"rbf",
"sigmoid",NULL
3708 kkint32 numberOfBinaryClassifiers = nr_class * (nr_class - 1) / 2;
3710 kkint32 origPrecision = (kkint32)o.precision ();
3713 o <<
"<Svm233>" << endl;
3715 o <<
"Parameters" <<
"\t" << param.ToTabDelStr () << endl;
3716 o <<
"NumOfClasses" <<
"\t" << nr_class << endl;
3719 o <<
"TotalNumOfSupportVectors" <<
"\t" << totalNumSVs << endl;
3723 for (kkint32 i = 0; i < numberOfBinaryClassifiers; i++)
3724 o <<
"\t" << model->rho[i];
3730 kkint32 oldPrecision = (kkint32)o.precision ();
3733 for (kkint32 i = 0; i < numberOfBinaryClassifiers; i++)
3734 o <<
"\t" << model->margin[i];
3736 o.precision (oldPrecision);
3742 for (kkint32 i = 0; i < nr_class; i++)
3743 o <<
"\t" << model->label[i];
3750 for (kkint32 i = 0; i < nr_class; i++)
3751 o <<
"\t" << model->nSV[i];
3755 const double *
const *sv_coef = model
->sv_coef;
3760 kkint32 totalNumOfElements = 0;
3761 for (
kkint32 i = 0; i < totalNumSVs; i++)
3766 totalNumOfElements++;
3770 totalNumOfElements++;
3772 o <<
"TOTALNUMOFELEMENTS" <<
"\t" << totalNumOfElements << endl;
3775 for (
kkint32 i = 0; i < totalNumSVs; i++)
3777 if ((kkint32)model->exampleNames.size () > i)
3778 o <<
"SuportVectorNamed" <<
"\t" << model->exampleNames[i];
3780 o <<
"SuportVector";
3783 for (kkint32 j = 0; j < nr_class - 1; j++)
3784 o <<
"\t" << sv_coef[j][i];
3789 o <<
"\t" << p->index <<
":" << p->value;
3797 o <<
"</Svm233>" << endl;
3799 o.precision (origPrecision);
3811 kkint32 bullAllocSize = 500000;
3812 char* buff =
new char[bullAllocSize];
3816 kkint32 numElementsLoaded = 0;
3818 kkint32 totalNumOfElements = -1;
3822 bool validFormat =
true;
3829 while (f.getline (buff, bullAllocSize))
3832 line.TrimLeft (
"\n\r\t ");
3833 line.TrimRight (
"\n\r\t ");
3835 if ((line.Len () > 0) && (line.SubStrPart (0, 1) !=
"//"))
3846 log.Level (-1) << endl << endl
3847 <<
"SVM233::Svm_Load_Model ***ERROR*** The '<Svm233>' header is missing. Not a valid model." << endl
3849 delete model; model = NULL;
3850 delete[] buff; buff = NULL;
3858 while (f.getline (buff, bullAllocSize))
3860 KKStrParser line (buff);
3861 line.SkipWhiteSpace (
" ");
3863 KKStr lineName = line.GetNextToken ();
3865 if (lineName.EqualIgnoreCase (
"<SvmMachine>"))
3868 if (lineName.EqualIgnoreCase (
"</Svm233>"))
3873 if (lineName ==
"PARAMETERS")
3875 model->param.ParseTabDelStr (line.GetRestOfLine ());
3878 else if (lineName ==
"NUMOFCLASSES")
3880 numOfClasses = line.GetNextTokenInt (
"\t");
3881 model->nr_class = numOfClasses;
3884 else if (lineName ==
"TOTALNUMOFSUPPORTVECTORS")
3886 totalNumSVs = line.GetNextTokenInt (
"\t");
3887 model->l = totalNumSVs;
3890 else if (lineName ==
"RHO")
3892 kkint32 n = numOfClasses * (numOfClasses - 1) / 2;
3893 model->rho =
Malloc (
double, n);
3894 for (kkint32 i = 0; i < n; i++)
3895 model->rho[i] = line.GetNextTokenDouble (
"\t");
3898 else if (lineName ==
"LABEL")
3900 model->label =
Malloc (kkint32, numOfClasses);
3901 for (kkint32 i=0; i < numOfClasses; i++)
3902 model->label[i] = line.GetNextTokenInt (
"\t");
3906 else if (lineName ==
"NR_SV")
3908 model->nSV =
Malloc(kkint32, numOfClasses);
3909 for (kkint32 i = 0; i < numOfClasses; i++)
3910 model->nSV[i] = line.GetNextTokenInt (
"\t");
3914 else if (lineName.EqualIgnoreCase (
"Margins"))
3916 kkint32 n = numOfClasses * (numOfClasses - 1) / 2;
3917 delete model->margin;
3918 model->margin =
new double[n];
3919 for (kkint32 i = 0; i < n; i++)
3920 model->margin[i] = line.GetNextTokenDouble (
"\t");
3923 else if (lineName ==
"TOTALNUMOFELEMENTS")
3925 totalNumOfElements = line.GetNextTokenInt (
"\t");
3927 kkint32 m = model->nr_class - 1;
3928 kkint32 l = model->l;
3929 model->sv_coef =
Malloc (
double*, m);
3931 for (kkint32 i = 0; i < m; i++)
3932 model->sv_coef[i] =
Malloc (
double, l);
3934 model->SV =
Malloc (svm_node*, l);
3935 x_space =
new svm_node[totalNumOfElements];
3936 model->xSpace = x_space;
3937 model->weOwnXspace =
true;
3940 else if (lineName.EqualIgnoreCase (
"SUPORTVECTOR") || lineName.EqualIgnoreCase (
"SuportVectorNamed"))
3942 if (numSVsLoaded >= totalNumSVs)
3944 log.Level (-1) << endl << endl << endl
3945 <<
"SvmLoadModel **** ERROR **** There are more 'SupportVector' lines defined that there should be." << endl
3947 validFormat =
false;
3951 if (totalNumOfElements < 1)
3953 log.Level (-1) << endl << endl << endl
3954 <<
"SvmLoadModel **** ERROR **** 'totalNumOfElements' was not defined." << endl
3956 validFormat =
false;
3960 if (lineName.EqualIgnoreCase (
"SuportVectorNamed"))
3963 model->exampleNames.push_back (line.GetNextToken (
"\t"));
3966 for (kkint32 j = 0; j < numOfClasses - 1; j++)
3967 model->sv_coef[j][numSVsLoaded] = line.GetNextTokenDouble (
"\t");
3969 model->SV[numSVsLoaded] = &(x_space[numElementsLoaded]);
3970 while ((line.MoreTokens ()) && (numElementsLoaded < (totalNumOfElements - 1)))
3972 x_space[numElementsLoaded].index = line.GetNextTokenInt (
":");
3973 x_space[numElementsLoaded].value = line.GetNextTokenDouble (
"\t");
3974 numElementsLoaded++;
3977 if (numElementsLoaded >= totalNumOfElements)
3979 log.Level (-1) << endl << endl << endl
3980 <<
"SvmLoadModel **** ERROR **** 'numElementsLoaded' is greater than what was defined by 'totalNumOfElements'." << endl
3982 validFormat =
false;
3986 x_space[numElementsLoaded].index = -1;
3987 x_space[numElementsLoaded].value = 0.0;
3988 numElementsLoaded++;
3996 if (numSVsLoaded != totalNumSVs)
3998 log.Level (-1) << endl << endl << endl
3999 <<
"SvmLoadModel ***ERROR*** numSVsLoaded[" << numSVsLoaded <<
"] does not agree with totalNumSVs[" << totalNumSVs <<
"]" << endl
4001 validFormat =
false;
4004 if (numElementsLoaded != totalNumOfElements)
4006 log.Level (-1) << endl << endl << endl
4007 <<
"SvmLoadModel ***ERROR*** numElementsLoaded[" << numElementsLoaded <<
"] does not agree with totalNumOfElements[" << totalNumOfElements <<
"]" << endl
4009 validFormat =
false;
4012 else if (numOfClasses < 1)
4014 log.Level (-1) << endl << endl << endl
4015 <<
"SvmLoadModel ***ERROR*** numOfClasses was not specified." << endl
4017 validFormat =
false;
4045 std::vector<
double>& dist,
4046 std::vector<kkint32>& winners,
4047 kkint32 excludeSupportVectorIDX
4052 winners.erase (winners.begin (), winners.end ());
4064 if (i == excludeSupportVectorIDX)
4073 sum -= model
->rho[0];
4075 double returnVal = 0.0;
4078 returnVal = (sum > 0) ? 1:-1;
4082 winners.push_back ((kkint32)returnVal);
4090 std::vector<kkint32> voteTable (nr_class * nr_class, 0);
4099 if (i == excludeSupportVectorIDX)
4116 for (i = 1; i < nr_class; i++)
4117 start[i] = start[i-1] + model
->nSV[i-1];
4121 for (i = 0; i < nr_class; i++)
4126 std::vector<
double> distTable (nr_class * nr_class, 0);
4128 for (i = 0; i < nr_class; i++)
4130 for (
kkint32 j = i + 1; j < nr_class; j++)
4142 for (k = 0; k < ci; k++)
4143 sum += coef1[si + k] * kvalue[si + k];
4145 for (k = 0; k < cj; k++)
4146 sum += coef2[sj + k] * kvalue[sj + k];
4148 sum -= model
->rho[p];
4152 voteTable[i * nr_class + j]++;
4157 voteTable[j * nr_class + i]++;
4164 distTable[row * nr_class + col] = sum;
4165 distTable[col * nr_class + row] = (-1.0 * sum);
4176 for (
kkint32 i = 0; i < nr_class - 1; i++)
4178 for (
kkint32 j=i+1; j< nr_class; j++)
4180 dist[p++] = distTable[i * nr_class + j];
4186 winners.erase (winners.begin (), winners.end ());
4189 winners.push_back (model->label[0]);
4191 for (i = 1; i < nr_class; i++)
4193 if (vote[i] > winningAmt)
4195 winners.erase (winners.begin (), winners.end ());
4197 winningAmt = vote[i];
4198 winners.push_back (model->label[i]);
4200 else if (vote[i] == winningAmt)
4202 winners.push_back (model->label[i]);
4211 return winningLabel;
4221 kkint32 excludeSupportVectorIDX
4238 if (i == excludeSupportVectorIDX)
4247 sum -= model
->rho[0];
4249 double returnVal = 0.0;
4252 returnVal = (sum > 0) ? 1:-1;
4266 <<
"svm_predictTwoClasses ***ERROR*** nr_class[" << nr_class <<
"] != 2" << std::endl
4278 for (i = 0; i < l; i++)
4280 if (i == excludeSupportVectorIDX)
4288 for (i = 0; i < l; i++)
4290 if (i == excludeSupportVectorIDX)
4299 for (i = 1; i < nr_class; i++)
4300 start[i] = start[i-1] + model
->nSV[i-1];
4317 for (k = 0; k < ci; k++)
4318 sum += coef1[si + k] * kvalue[si + k];
4320 for (k = 0; k < cj; k++)
4321 sum += coef2[sj + k] * kvalue[sj + k];
4323 sum -= model
->rho[p];
4341 double winningLabel = (
double)model
->label[winner];
4343 return winningLabel;
4357 kkint32 excludeSupportVectorIDX
4371 printf (
"\n\n\n svm_predictTwoClasses *** ERROR *** NumOf Classes != 2\n\n");
4378 <<
"SVM233::svm_BuildProbFromTwoClassModel ***ERROR*** model->dim[" << model->dim <<
"] > 0" << std::endl
4390 if ((excludeSupportVectorIDX >= 0) && (excludeSupportVectorIDX < l))
4393 newProb
->y =
new double[newProb
->l];
4398 for (
kkint32 svIDX = 0; svIDX < l; svIDX++)
4400 if (svIDX == excludeSupportVectorIDX)
4403 if (svIDX < numSVsFirstClass)
4404 newProb
->y[newIDX] = 0;
4406 newProb
->y[newIDX] = 1;
4408 newProb
->index[newIDX] = newIDX;
4409 newProb->exampleNames.push_back (model->exampleNames[svIDX]);
4423 for (
kkint32 fnIDX = 0; fnIDX < numElements; fnIDX++)
4455 if (svm_type !=
C_SVC &&
4461 return "unknown svm type";
4466 if (kernel_type !=
LINEAR &&
4467 kernel_type !=
POLY &&
4468 kernel_type !=
RBF &&
4471 return "unknown kernel type";
4476 return "cache_size <= 0";
4481 if(svm_type ==
C_SVC ||
4491 return "nu < 0 or nu > 1";
4499 return "shrinking != 0 and shrinking != 1";
4517 for(j=0;j<nr_class;j++)
4518 if(this_label == label[j])
4525 if(nr_class == max_nr_class)
4528 label = (kkint32 *)realloc(label,max_nr_class*
sizeof(kkint32));
4529 count = (kkint32 *)realloc(count,max_nr_class*
sizeof(kkint32));
4531 label[nr_class] = this_label;
4532 count[nr_class] = 1;
4537 for(i=0;i<nr_class;i++)
4540 for(
kkint32 j=i+1;j<nr_class;j++)
4543 if(param->nu*(n1+n2)/2 > min(n1,n2))
4547 return "specified nu is infeasible";
4680 model
->margin =
new double[nr_class * (nr_class - 1) / 2];
4682 std::vector<kkint32> start(nr_class,0);
4684 for(i=1;i<nr_class;i++)
4685 start[i] = start[i-1]+model->nSV[i-1];
4688 for (i = 0; i < nr_class; i++)
4689 for (
kkint32 j = i + 1; j < nr_class; j++)
4703 for (k = 0; k < ci; k++)
4707 trace += coef1[si+k]*coef1[si+k]*kvalue;
4709 for(
kkint32 kk=k+1; kk<ci; kk++)
4713 sum+= coef1[si+k]*coef1[si+kk]*kvalue;
4718 for(
kkint32 kk=0; kk<cj; kk++)
4726 sum += coef1[si+k]*coef2[sj+kk]*kvalue;
4732 for (k=0; k<cj-1; k++)
4736 trace += coef2[sj + k] * coef2[sj + k] * kvalue;
4738 for(
kkint32 kk=k+1; kk<cj; kk++)
4746 sum += coef2[sj + k] * coef2[sj + kk] * kvalue;
4750 double marginSqr = 2 * sum + trace;
4751 if (marginSqr <= 0.0)
4753 cerr << endl << endl
4754 <<
"SVM233::svm_margin ***ERROR*** the computed margin <= 0.0. This is very bad." << endl
4761 model
->margin[p] = sqrt(marginSqr);
4786 KKStr errMsg =
"svm_GetupportVectorsStatistics ***ERROR*** (model == NULL). Model was not defined.";
4787 cerr << endl << endl << errMsg << endl << endl;
4796 cerr << endl << endl
4797 <<
"svm_GetupportVectorsStatistics ***ERROR*** This function does not support for SVM Type[" << model->param.svm_type <<
"]" << endl
4814 for (i = 1; i < nr_class; i++)
4815 start[i] = start[i-1] + model
->nSV[i-1];
4818 for (i = 0; i < nr_class; i++)
4820 for (
kkint32 j = i + 1; j < nr_class; j++)
4832 for (k = 0; k < ci; k++)
4833 if (coef1[si + k] != 0.0)
4836 for (k = 0; k < cj; k++)
4837 if (coef2[sj + k] != 0.0)
KKStr(kkint32 size)
Creates a KKStr object that pre-allocates space for 'size' characters.
XmlTag(const KKStr &_name, TagTypes _tagType)
kkint32 MemoryConsumedEstimated() const
bool EqualIgnoreCase(const char *s2) const
void ParseTabDelStr(const KKStr &_str)
void svm_GetSupportVectorStatistics(const struct SvmModel233 *model, kkint32 &numSVs, kkint32 &totalNumSVs)
Extracts Support Vector statistics.
void swap_index(kkint32 i, kkint32 j) const
void TrimWhiteSpace(const char *_whiteSpace=" ")
After this call all leading and trailing whitespace will be trimmed from tokens.
static void solve_one_class(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si)
void Solve(kkint32 l, const Kernel &Q, const double *b_, const schar *y_, double *alpha_, double Cp, double Cn, double eps, SolutionInfo *si, kkint32 shrinking)
decision_function svm_train_one(const svm_problem *prob, const svm_parameter *param, double Cp, double Cn, std::set< kkint32 > &BSVIndex)
KKStr ExtractToken2(const char *delStr="\n\t\r ")
Extract first Token from the string.
KKStr GetNextToken(const char *delStr="\n\t\r ")
Extract next Token from string, tokens will be separated by delimiter characters. ...
Kernel(kkint32 l, svm_node *const *x, const svm_parameter &param)
KKStr ExtractToken(const char *delStr="\n\t\r ")
ONE_CLASS_Q(const svm_problem &prob, const svm_parameter &param)
KKStr ToCmdLineStr() const
void Solve(kkint32 l, const Kernel &Q, const double *b, const schar *y, double *alpha, double Cp, double Cn, double eps, SolutionInfo *si, kkint32 shrinking)
static void solve_c_svc(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, double *C_)
XmlContent * XmlContentPtr
const char * kernel_type_table[]
double(Kernel::* kernel_function)(kkint32 i, kkint32 j) const
static void solve_nu_svr(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si)
bool operator==(const char *rtStr) const
virtual void WriteXML(const KKStr &varName, std::ostream &o) const
Qfloat * get_Q(kkint32 i, kkint32 len) const
decision_function svm_train_one(const svm_problem *prob, const svm_parameter *param)
KKStr SupportVectorName(kkint32 svIDX)
struct SvmModel233 * svm_train(const struct svm_problem *prob, const struct svm_parameter *param)
bool is_upper_bound(kkint32 i)
double svm_predictTwoClasses(const SvmModel233 *model, const svm_node *x, double &dist, kkint32 excludeSupportVectorIDX)
KKStr & operator=(KKStr &&src)
virtual Qfloat * get_Q(kkint32 column, kkint32 len) const =0
struct SvmModel233 * Svm_Load_Model(std::istream &f, RunLog &log)
virtual kkint32 ToInt32() const
kkuint32 Len() const
Returns the number of characters in the string.
SVC_Q(const svm_problem &prob, const svm_parameter &param, const schar *y_)
svm_parameter(KKStr &paramStr)
Cache(kkint32 l, kkint32 size)
KKStrParser(const KKStr &_str)
double ExtractTokenDouble(const char *delStr)
void swap_index(kkint32 i, kkint32 j)
XmlElement * XmlElementPtr
void AddAtribute(const KKStr &attributeName, const KKStr &attributeValue)
virtual kkint32 select_working_set(kkint32 &i, kkint32 &j)
SVR_Q(const svm_problem &prob, const svm_parameter &param)
void swap_index(kkint32 i, kkint32 j) const
kkint32 get_data(const kkint32 index, Qfloat **data, kkint32 len)
void TrimLeft(const char *whiteSpaceChars="\n\r\t ")
KKStr ToTabDelStr() const
XmlTag const * XmlTagConstPtr
Manages the reading and writing of objects in a simple XML format. For a class to be supported by Xml...
void clone(T *&dst, S *src, kkint32 n)
bool is_lower_bound(kkint32 i)
static void solve_nu_svc(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si)
static KKStr Concat(const std::vector< std::string > &values)
Concatenates the list of 'std::string' strings.
void Upper()
Converts all characters in string to their Upper case equivalents via 'toupper'.
static void solve_c_svc(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si, double Cp, double Cn)
virtual void do_shrinking()
kkint32 MemoryConsumedEstimated() const
double GetNextTokenDouble(const char *delStr="\n\t\r ")
virtual const KKStr & VarName() const
Qfloat * get_Q(kkint32 i, kkint32 len) const
kkint32 GetNextTokenInt(const char *delStr="\n\t\r ")
void WriteXML(const KKStr &varName, std::ostream &o) const
std::ostream &__cdecl operator<<(std::ostream &os, const KKStr &str)
kkint32 ExtractTokenInt(const char *delStr)
void svm_get_labels(const SvmModel233 *model, kkint32 *label)
virtual const KKStr & SectionName() const
virtual double calculate_rho()
void update_alpha_status(kkint32 i)
void ProcessSvmParameter(KKStr cmd, KKStr value, double valueNum, bool &parmUsed)
virtual void swap_index(kkint32 i, kkint32 j) const
kkint32 svm_get_nr_class(const SvmModel233 *model)
char operator[](kkint32 i) const
void WriteXML(std::ostream &o)
KKStr & operator=(const KKStr &src)
KKStrPtr const Content() const
Used for logging messages.
void EncodeProblem(const struct svm_parameter &param, struct svm_problem &prob_in, struct svm_problem &prob_out)
const char * svm_type_table[]
void svm_margin(SvmModel233 *model)
void Svm_Save_Model(std::ostream &o, const SvmModel233 *model)
virtual TokenTypes TokenType()=0
Class that manages the extraction of tokens from a String without being destructive to the original s...
void swap_index(kkint32 i, kkint32 j) const
void info(const char *fmt,...)
const char * svm_check_parameter(const struct svm_problem *prob, const struct svm_parameter *param)
svm_parameter & operator=(const svm_parameter &right)
virtual XmlTokenPtr GetNextToken(VolConstBool &cancelFlag, RunLog &log)
KKException(const KKStr &_exceptionStr)
svm_parameter(const svm_parameter ¶meters)
static void solve_epsilon_svr(const svm_problem *prob, const svm_parameter *param, double *alpha, Solver::SolutionInfo *si)
KKStr SubStrPart(kkint32 firstChar) const
returns a SubString consisting of all characters starting at index 'firstChar' until the end of the s...
void swap_index(kkint32 i, kkint32 j)
#define XmlFactoryMacro(NameOfClass)
void reconstruct_gradient()
void svm_destroy_model(struct SvmModel233 *model)
void Solve(kkint32 l, const Kernel &Q, const double *b_, const schar *y_, double *alpha_, double *C_, double eps, SolutionInfo *si, kkint32 shrinking)
svm_problem * svm_BuildProbFromTwoClassModel(const SvmModel233 *model, kkint32 excludeSupportVectorIDX)
virtual void ReadXML(XmlStream &s, XmlTagConstPtr tag, VolConstBool &cancelFlag, RunLog &log)
static double k_function_subspace(const svm_node *x, const svm_node *y, const svm_parameter &param, const double *featureWeight)
static double k_function(const svm_node *x, const svm_node *y, const svm_parameter &param)
volatile const bool VolConstBool
Qfloat * get_Q(kkint32 i, kkint32 len) const