summary refs log tree commit diff
path: root/model/ranksvmtn.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'model/ranksvmtn.cpp')
-rw-r--r--  model/ranksvmtn.cpp | 48
1 files changed, 48 insertions, 0 deletions
diff --git a/model/ranksvmtn.cpp b/model/ranksvmtn.cpp
index 746e967..59980b4 100644
--- a/model/ranksvmtn.cpp
+++ b/model/ranksvmtn.cpp
@@ -3,10 +3,58 @@
using namespace std;
using namespace Eigen;
+const int maxiter = 10;
+const double prec=1e-3;
+
+// Calculate objfunc gradient & support vectors
+int objfunc_linear(const VectorXd &w,const double C,const VectorXd &pred, double &obj,MatrixXd &sv)
+{
+
+}
+
+// line search
+int line_search(const VectorXd &w,const double C,const VectorXd &step,VectorXd &pred,double &t)
+{
+
+}
+
int RSVMTN::train(DataSet &D, Labels &label){
+ int iter = 0;
+
+ MatrixXd A;
+
+ int n=D.rows();
+ LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n;
+ MatrixXd sv=MatrixXd::Identity(n, n);
+ VectorXd grad(fsize);
+ VectorXd step(fsize);
+ VectorXd pred(n);
+ double obj,t;
+
+ pred=VectorXd::Ones(n) - (A*(D*model.weight));
+
+ while (true)
+ {
+ iter+=1;
+ if (iter> maxiter)
+ {
+ LOG(INFO)<< "Maxiter :"<<maxiter<<" reached";
+ break;
+ }
+
+ // Generate support vector matrix sv & gradient
+ model.weight=model.weight+step*t;
+ // When dec is small enough
+ if (-step.dot(grad) < prec * obj)
+ break;
+ }
+
return 0;
};
int RSVMTN::predict(DataSet &D, Labels &res){
+ res = model.weight * D;
+ for (int i=0;i<res.cols();++i)
+ res[i] = (res[i] + model.beta);
return 0;
}; \ No newline at end of file