author | Joe Zhao <ztuowen@gmail.com> | 2015-03-12 18:21:48 +0800 |
---|---|---|
committer | Joe Zhao <ztuowen@gmail.com> | 2015-03-12 18:21:48 +0800 |
commit | 68b7998f12e7e3c9b257a9d2a4c14e8d32a048b9 (patch) | |
tree | 8be6053f362cc190508ea46d6d87e72a59ce7cf9 /model/ranksvmtn.cpp | |
parent | fd32eb3bfc64564d64401f9c2ee03b1557be9c71 (diff) | |
download | ranksvm-68b7998f12e7e3c9b257a9d2a4c14e8d32a048b9.tar.gz ranksvm-68b7998f12e7e3c9b257a9d2a4c14e8d32a048b9.tar.bz2 ranksvm-68b7998f12e7e3c9b257a9d2a4c14e8d32a048b9.zip |
modeling
Diffstat (limited to 'model/ranksvmtn.cpp')
-rw-r--r-- | model/ranksvmtn.cpp | 48 |
1 files changed, 48 insertions, 0 deletions
diff --git a/model/ranksvmtn.cpp b/model/ranksvmtn.cpp
index 746e967..59980b4 100644
--- a/model/ranksvmtn.cpp
+++ b/model/ranksvmtn.cpp
@@ -3,10 +3,58 @@
 using namespace std;
 using namespace Eigen;
 
+const int maxiter = 10;
+const double prec=1e-3;
+
+// Calculate objfunc gradient & support vectors
+int objfunc_linear(const VectorXd &w,const double C,const VectorXd &pred, double &obj,MatrixXd &sv)
+{
+
+}
+
+// line search
+int line_search(const VectorXd &w,const double C,const VectorXd &step,VectorXd &pred,double &t)
+{
+
+}
+
 int RSVMTN::train(DataSet &D, Labels &label){
+    int iter = 0;
+
+    MatrixXd A;
+
+    int n=D.rows();
+    LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n;
+    MatrixXd sv=MatrixXd::Identity(n, n);
+    VectorXd grad(fsize);
+    VectorXd step(fsize);
+    VectorXd pred(n);
+    double obj,t;
+
+    pred=VectorXd::Ones(n) - (A*(D*model.weight));
+
+    while (true)
+    {
+        iter+=1;
+        if (iter> maxiter)
+        {
+            LOG(INFO)<< "Maxiter :"<<maxiter<<" reached";
+            break;
+        }
+
+        // Generate support vector matrix sv & gradient
+        model.weight=model.weight+step*t;
+        // When dec is small enough
+        if (-step.dot(grad) < prec * obj)
+            break;
+    }
+
     return 0;
 };
 
 int RSVMTN::predict(DataSet &D, Labels &res){
+    res = model.weight * D;
+    for (int i=0;i<res.cols();++i)
+        res[i] = (res[i] + model.beta);
     return 0;
 };
\ No newline at end of file
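
Note: this commit only lays out the skeleton of the truncated-Newton solver. objfunc_linear and line_search are empty stubs, A is declared but never filled in, and no Newton step is computed before model.weight is updated. Below is a minimal sketch of one way the two stubs could be completed, assuming an L2-loss (squared hinge) primal RankSVM objective

    f(w) = 1/2 w'w + C * sum_i max(0, pred_i)^2,   with pred = 1 - A*(D*w),

which matches how pred is formed in train(). The widened signatures (A, D and grad passed explicitly) and the backtracking line search are assumptions made to keep the sketch self-contained; they are not part of this commit.

#include <Eigen/Dense>

using namespace Eigen;

// Hypothetical completion of objfunc_linear: objective value, gradient and
// support-vector indicator for the assumed L2-loss objective. sv is used as
// a diagonal 0/1 matrix marking the pairs with positive slack (pred_i > 0).
int objfunc_linear(const VectorXd &w, const MatrixXd &A, const MatrixXd &D,
                   const double C, const VectorXd &pred,
                   double &obj, VectorXd &grad, MatrixXd &sv)
{
    VectorXd hinge = pred.cwiseMax(0.0);                 // max(0, pred_i)
    for (int i = 0; i < pred.rows(); ++i)
        sv(i, i) = pred(i) > 0 ? 1.0 : 0.0;              // mark support vectors
    obj  = 0.5 * w.dot(w) + C * hinge.squaredNorm();
    grad = w - 2.0 * C * (D.transpose() * (A.transpose() * hinge));
    return 0;
}

// Hypothetical completion of line_search: simple backtracking search along
// `step`, updating pred so it stays consistent with w + t*step. A real
// truncated-Newton solver would more likely use an exact 1-D Newton search.
int line_search(const VectorXd &w, const MatrixXd &A, const MatrixXd &D,
                const double C, const VectorXd &step,
                VectorXd &pred, double &t)
{
    VectorXd dpred = -(A * (D * step));                  // d pred / d t
    double f0 = 0.5 * w.dot(w) + C * pred.cwiseMax(0.0).squaredNorm();
    t = 1.0;
    for (int k = 0; k < 20; ++k)                         // halve t until descent
    {
        VectorXd wt = w + t * step;
        VectorXd pt = pred + t * dpred;
        double ft = 0.5 * wt.dot(wt) + C * pt.cwiseMax(0.0).squaredNorm();
        if (ft < f0)
            break;
        t *= 0.5;
    }
    pred += t * dpred;                                   // keep pred in sync with new w
    return 0;
}

In this reading, the "Generate support vector matrix sv & gradient" comment in train() would correspond to a call to objfunc_linear, and the Newton direction step would come from solving (I + 2C*D'A'*sv*A*D)*step = -grad, e.g. with a few conjugate-gradient iterations, before line_search picks t. That interpretation is inferred from the variable names and the convergence test -step.dot(grad) < prec*obj; it is not stated in the commit itself.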