diff options
author | Joe Zhao <ztuowen@gmail.com> | 2015-05-11 18:38:39 +0800 |
---|---|---|
committer | Joe Zhao <ztuowen@gmail.com> | 2015-05-11 18:38:39 +0800 |
commit | c69b39a9f149cc6b5c7270d7d864fb677bc83b34 (patch) | |
tree | bba8f4b244c5892265709b822751b9be56b4bf47 /model/ranksvmtn.cpp | |
parent | 0f05a379b2c5df0b05c23fd91d697464bd250507 (diff) | |
download | ranksvm-c69b39a9f149cc6b5c7270d7d864fb677bc83b34.tar.gz ranksvm-c69b39a9f149cc6b5c7270d7d864fb677bc83b34.tar.bz2 ranksvm-c69b39a9f149cc6b5c7270d7d864fb677bc83b34.zip |
RidFile tested
Diffstat (limited to 'model/ranksvmtn.cpp')
-rw-r--r-- | model/ranksvmtn.cpp | 27 |
1 file changed, 20 insertions, 7 deletions
diff --git a/model/ranksvmtn.cpp b/model/ranksvmtn.cpp index b82ce64..c2ca639 100644 --- a/model/ranksvmtn.cpp +++ b/model/ranksvmtn.cpp @@ -6,12 +6,13 @@ using namespace std; using namespace Eigen; -const double C=1e-2; // Compensating & scaling +const double C=1e-5; // Compensating & scaling // Main terminating criteria const int maxiter = 10; // max iteration count -const double prec=1e-4; // precision +const double prec=1e-10; // precision // conjugate gradient const double cg_prec=1e-10; // precision +const int cg_maxiter = 30; // line search const double line_prec=1e-10; // precision const double line_turb=1e-15; // purturbation @@ -64,6 +65,11 @@ int cg_solve(const MatrixXd &D,const vector<int> &rank,const VectorXd &corr,cons x=x+p*alpha; res=res-q*alpha; ++step; + if (step > cg_maxiter) + { + LOG(INFO) << "CG force terminated by maxiter"; + break; + } r_2=r_1; } return 0; @@ -138,6 +144,7 @@ int line_search(const VectorXd &w,const MatrixXd &D,const VectorXd &corr,const v VectorXd grad; VectorXd Hs; vector<int> rank(D.rows()); + int iter = 0; for (int i=0;i<A1.size();++i) Xd(i) = Dd(A1[i])-Dd(A2[i]); @@ -157,6 +164,12 @@ int line_search(const VectorXd &w,const MatrixXd &D,const VectorXd &corr,const v t=t-g/h; if (g*g/h<line_prec) break; + ++iter; + if (iter > cg_maxiter) + { + LOG(INFO) << "line search force terminated by maxiter"; + break; + } } return 0; } @@ -179,14 +192,12 @@ int train_orig(int fsize, MatrixXd &D,const vector<int> &A1,const vector<int> &A iter+=1; if (iter> maxiter) { - LOG(INFO)<< "Maxiter :"<<maxiter<<" reached"; + LOG(INFO)<< "Maxiter reached"; break; } dw = D*weight; - cal_alpha_beta(dw,corr,A1,A2,rank,yt,alpha,beta); - // Generate support vector matrix sv & gradient obj = (weight.dot(weight) + C*(alpha.dot(yt.cwiseProduct(yt))-beta.dot(yt)))/2;// grad = weight + C*(D.transpose()*(alpha.cwiseProduct(yt)-beta)); @@ -213,8 +224,10 @@ int RSVMTN::train(DataList &D){ vector<DataEntry*> &dat = D.getData(); for (i=0;i<D.getSize();++i) { 
corr(i)=(dat[i])->rank>0?0.5:-0.5; - for (j = 0; j < D.getfSize(); ++j) - Data(i, j) = dat[i]->feature(j); + + for (j = 0; j < D.getfSize(); ++j){ + Data(i, j) = dat[i]->feature(j);} + } i=j=0; while (i<D.getSize()) |