-rw-r--r-- | model/ranksvm.h     |  7 |
-rw-r--r-- | model/ranksvmtn.cpp | 48 |
2 files changed, 55 insertions, 0 deletions
diff --git a/model/ranksvm.h b/model/ranksvm.h
index edb80f6..b4ec7ce 100644
--- a/model/ranksvm.h
+++ b/model/ranksvm.h
@@ -6,6 +6,13 @@
 #include"../tools/dataProvider.h"
 #include "../tools/easylogging++.h"
 
+// Model File:
+// Model type -> String
+// Number of features (fsize) -> int
+// Weight matrix size, (fsize,1) -> (int,int)
+// Weight vector
+// beta
+
 typedef struct SVMModel{
     Eigen::VectorXd weight;
     double beta;
diff --git a/model/ranksvmtn.cpp b/model/ranksvmtn.cpp
index 746e967..59980b4 100644
--- a/model/ranksvmtn.cpp
+++ b/model/ranksvmtn.cpp
@@ -3,10 +3,58 @@
 using namespace std;
 using namespace Eigen;
 
+const int maxiter = 10;
+const double prec=1e-3;
+
+// Calculate objfunc gradient & support vectors
+int objfunc_linear(const VectorXd &w,const double C,const VectorXd &pred, double &obj,MatrixXd &sv)
+{
+
+}
+
+// line search
+int line_search(const VectorXd &w,const double C,const VectorXd &step,VectorXd &pred,double &t)
+{
+
+}
+
 int RSVMTN::train(DataSet &D, Labels &label){
+    int iter = 0;
+
+    MatrixXd A;
+
+    int n=D.rows();
+    LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n;
+    MatrixXd sv=MatrixXd::Identity(n, n);
+    VectorXd grad(fsize);
+    VectorXd step(fsize);
+    VectorXd pred(n);
+    double obj,t;
+
+    pred=VectorXd::Ones(n) - (A*(D*model.weight));
+
+    while (true)
+    {
+        iter+=1;
+        if (iter> maxiter)
+        {
+            LOG(INFO)<< "Maxiter :"<<maxiter<<" reached";
+            break;
+        }
+
+        // Generate support vector matrix sv & gradient
+        model.weight=model.weight+step*t;
+        // When dec is small enough
+        if (-step.dot(grad) < prec * obj)
+            break;
+    }
+
     return 0;
 };
 
 int RSVMTN::predict(DataSet &D, Labels &res){
+    res = model.weight * D;
+    for (int i=0;i<res.cols();++i)
+        res[i] = (res[i] + model.beta);
     return 0;
 };
\ No newline at end of file
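
For reference, the following is a minimal, self-contained sketch of what the two stubbed helpers could look like, assuming the usual L2-loss primal formulation used with truncated Newton solvers: obj = 0.5 * w'w + C * sum_i max(0, pred_i)^2, where pred_i = 1 - (A*D*w)_i as computed in train(), and sv is a diagonal 0/1 matrix marking the currently violated (support) pairs. The names objfunc_linear_sketch and line_search_sketch, the extra A and X parameters in the line search, and the backtracking strategy are illustrative assumptions, not the repository's implementation. Note also that, if D is an (n x fsize) matrix and model.weight an fsize-vector, the score in predict() would normally be formed as D * model.weight rather than model.weight * D.

// Sketch only -- assumptions: pred[i] = 1 - margin_i, squared hinge on
// violated pairs, diagonal sv marks the active pairs. Not the project's code.
#include <Eigen/Dense>

using Eigen::MatrixXd;
using Eigen::VectorXd;

// Hypothetical body for objfunc_linear: objective value plus support-vector
// indicator matrix for an L2-loss pairwise hinge.
int objfunc_linear_sketch(const VectorXd &w, const double C,
                          const VectorXd &pred, double &obj, MatrixXd &sv)
{
    const int n = static_cast<int>(pred.size());
    sv = MatrixXd::Zero(n, n);
    double loss = 0.0;
    for (int i = 0; i < n; ++i) {
        if (pred[i] > 0) {          // pair i is violated: 1 - margin_i > 0
            sv(i, i) = 1.0;
            loss += pred[i] * pred[i];
        }
    }
    obj = 0.5 * w.dot(w) + C * loss;
    return 0;
}

// Hypothetical body for line_search: simple backtracking on the same
// objective. A and X are passed in only to keep the sketch self-contained;
// the stub in the commit does not take them.
int line_search_sketch(const VectorXd &w, const double C, const MatrixXd &A,
                       const MatrixXd &X, const VectorXd &step,
                       VectorXd &pred, double &t)
{
    double obj_old, obj_new;
    MatrixXd sv;
    objfunc_linear_sketch(w, C, pred, obj_old, sv);
    t = 1.0;
    for (int k = 0; k < 20; ++k) {
        VectorXd w_new = w + t * step;
        VectorXd pred_new = VectorXd::Ones(pred.size()) - A * (X * w_new);
        objfunc_linear_sketch(w_new, C, pred_new, obj_new, sv);
        if (obj_new <= obj_old) {   // accept the first non-increasing step
            pred = pred_new;
            return 0;
        }
        t *= 0.5;                   // otherwise halve the step size
    }
    return 1;                       // no acceptable step found
}

Under that loss, the gradient the training loop needs would be w - 2C * (A*D)' * sv * pred, which is presumably what the "// Generate support vector matrix sv & gradient" placeholder in train() is meant to fill in before the Newton step and line search are applied.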