| author | Joe Zhao <ztuowen@gmail.com> | 2015-03-12 18:21:48 +0800 | 
|---|---|---|
| committer | Joe Zhao <ztuowen@gmail.com> | 2015-03-12 18:21:48 +0800 | 
| commit | 68b7998f12e7e3c9b257a9d2a4c14e8d32a048b9 (patch) | |
| tree | 8be6053f362cc190508ea46d6d87e72a59ce7cf9 | |
| parent | fd32eb3bfc64564d64401f9c2ee03b1557be9c71 (diff) | |
| download | ranksvm-68b7998f12e7e3c9b257a9d2a4c14e8d32a048b9.tar.gz ranksvm-68b7998f12e7e3c9b257a9d2a4c14e8d32a048b9.tar.bz2 ranksvm-68b7998f12e7e3c9b257a9d2a4c14e8d32a048b9.zip | |
modeling
| -rw-r--r-- | model/ranksvm.h | 7 |
| -rw-r--r-- | model/ranksvmtn.cpp | 48 |
2 files changed, 55 insertions, 0 deletions
diff --git a/model/ranksvm.h b/model/ranksvm.h
index edb80f6..b4ec7ce 100644
--- a/model/ranksvm.h
+++ b/model/ranksvm.h
@@ -6,6 +6,13 @@
 #include"../tools/dataProvider.h"
 #include "../tools/easylogging++.h"
 
+// Model File:
+// Model type -> String
+// Number of features (fsize) -> int
+// Weight matrix size, (fsize,1) ->  (int,int)
+// Weight vector
+// beta
+
 typedef struct SVMModel{
     Eigen::VectorXd weight;
     double beta;
diff --git a/model/ranksvmtn.cpp b/model/ranksvmtn.cpp
index 746e967..59980b4 100644
--- a/model/ranksvmtn.cpp
+++ b/model/ranksvmtn.cpp
@@ -3,10 +3,58 @@
 using namespace std;
 using namespace Eigen;
 
+const int maxiter = 10;
+const double prec=1e-3;
+
+// Calculate objfunc gradient & support vectors
+int objfunc_linear(const VectorXd &w,const double C,const VectorXd &pred, double &obj,MatrixXd &sv)
+{
+
+}
+
+// line search
+int line_search(const VectorXd &w,const double C,const VectorXd &step,VectorXd &pred,double &t)
+{
+
+}
+
 int RSVMTN::train(DataSet &D, Labels &label){
+    int iter = 0;
+
+    MatrixXd A;
+
+    int n=D.rows();
+    LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n;
+    MatrixXd sv=MatrixXd::Identity(n, n);
+    VectorXd grad(fsize);
+    VectorXd step(fsize);
+    VectorXd pred(n);
+    double obj,t;
+
+    pred=VectorXd::Ones(n) - (A*(D*model.weight));
+
+    while (true)
+    {
+        iter+=1;
+        if (iter> maxiter)
+        {
+            LOG(INFO)<< "Maxiter :"<<maxiter<<" reached";
+            break;
+        }
+
+        // Generate support vector matrix sv & gradient
+        model.weight=model.weight+step*t;
+        // When dec is small enough
+        if (-step.dot(grad) < prec * obj)
+            break;
+    }
+
     return 0;
 };
 
 int RSVMTN::predict(DataSet &D, Labels &res){
+    res = model.weight * D;
+    for (int i=0;i<res.cols();++i)
+        res[i] = (res[i] + model.beta);
     return 0;
 };
\ No newline at end of file
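The two helpers added in this commit are left as empty stubs. Below is a minimal sketch of how they might be filled in, assuming the primal L2 hinge objective obj(w) = 0.5*||w||^2 + C * sum over violated pairs of pred_i^2, where pred_i = 1 minus the margin of the i-th preference pair. The extra descPred argument to line_search (how pred changes along the search direction) is a hypothetical addition, since the committed signature carries no data from which it could be derived; this is not the repository's eventual implementation.

```cpp
#include <cmath>
#include <Eigen/Dense>
using Eigen::VectorXd;
using Eigen::MatrixXd;

// Objective value and diagonal 0/1 selector of violated (support) pairs,
// for obj(w) = 0.5*||w||^2 + C * sum_{pred_i > 0} pred_i^2.
int objfunc_linear(const VectorXd &w, const double C, const VectorXd &pred,
                   double &obj, MatrixXd &sv)
{
    const int n = pred.size();
    sv = MatrixXd::Zero(n, n);
    double hinge = 0;
    for (int i = 0; i < n; ++i)
        if (pred(i) > 0) {            // pair i violates the margin
            sv(i, i) = 1;
            hinge += pred(i) * pred(i);
        }
    obj = 0.5 * w.dot(w) + C * hinge;
    return 0;
}

// Exact 1-D Newton line search on
//   phi(t) = 0.5*||w + t*step||^2 + C * sum over pairs still violated at t
//            of (pred_i - t*descPred_i)^2,
// where descPred (hypothetical extra argument) is how pred changes along step.
int line_search(const VectorXd &w, const double C, const VectorXd &step,
                const VectorXd &descPred, VectorXd &pred, double &t)
{
    t = 0;
    for (int it = 0; it < 20; ++it) {
        double g = w.dot(step) + t * step.dot(step);   // phi'(t), regularizer part
        double h = step.dot(step);                     // phi''(t), regularizer part
        for (int i = 0; i < pred.size(); ++i) {
            double p = pred(i) - t * descPred(i);
            if (p > 0) {                               // pair still violated at this t
                g -= 2 * C * p * descPred(i);
                h += 2 * C * descPred(i) * descPred(i);
            }
        }
        if (h < 1e-12 || std::fabs(g) < 1e-12)
            break;
        t -= g / h;                                    // Newton update on t
    }
    pred -= t * descPred;                              // residuals at the accepted t
    return 0;
}
```

With helpers of this shape, the obj, sv, and t values consumed by the loop in RSVMTN::train would come from these two calls.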

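The loop body also leaves the Newton direction itself unspecified (the "// Generate support vector matrix sv & gradient" placeholder), and the stopping test -step.dot(grad) < prec * obj is the usual Newton-decrement criterion. For the objective above, grad = w - 2C * X^T * (s .* pred) and the Hessian is I + 2C * X^T * S * X, with X the pairwise difference matrix (A*D in the loop's notation) and S = diag(s); a truncated Newton step solves H*step = -grad approximately with a few conjugate-gradient iterations that need only Hessian-vector products. The sketch below works under those assumptions, with X materialised as a dense MatrixXd and svDiag the 0/1 diagonal of sv (both hypothetical names, not taken from the commit).

```cpp
#include <Eigen/Dense>
using Eigen::VectorXd;
using Eigen::MatrixXd;

// grad = w - 2C * X^T * (svDiag .* pred) for the L2 hinge objective above.
VectorXd gradient(const MatrixXd &X, const VectorXd &svDiag, double C,
                  const VectorXd &w, const VectorXd &pred)
{
    return w - 2 * C * (X.transpose() * svDiag.cwiseProduct(pred));
}

// Hessian-vector product H*v = v + 2C * X^T * (svDiag .* (X*v)).
VectorXd hessVec(const MatrixXd &X, const VectorXd &svDiag, double C,
                 const VectorXd &v)
{
    return v + 2 * C * (X.transpose() * svDiag.cwiseProduct(X * v));
}

// Truncated Newton direction: a few conjugate-gradient iterations on
// H * step = -grad, touching H only through hessVec.
VectorXd newtonStep(const MatrixXd &X, const VectorXd &svDiag, double C,
                    const VectorXd &grad, int cgIter = 10, double cgTol = 1e-10)
{
    VectorXd step = VectorXd::Zero(grad.size());
    VectorXd r = -grad;                    // residual of H*step = -grad at step = 0
    VectorXd p = r;
    double rs = r.dot(r);
    for (int it = 0; it < cgIter && rs > cgTol; ++it) {
        VectorXd Hp = hessVec(X, svDiag, C, p);
        double alpha = rs / p.dot(Hp);
        step += alpha * p;
        r -= alpha * Hp;
        double rsNew = r.dot(r);
        p = r + (rsNew / rs) * p;
        rs = rsNew;
    }
    return step;
}
```

The cgIter and cgTol defaults here are illustrative only; a full truncated-Newton solver would normally tie the inner CG tolerance to the current gradient norm rather than using a fixed cutoff.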