Diffstat (limited to 'model/ranksvmtn.cpp')
-rw-r--r--  model/ranksvmtn.cpp | 42
1 file changed, 29 insertions(+), 13 deletions(-)
diff --git a/model/ranksvmtn.cpp b/model/ranksvmtn.cpp
index 539ab5e..fe29468 100644
--- a/model/ranksvmtn.cpp
+++ b/model/ranksvmtn.cpp
@@ -1,4 +1,6 @@
#include "ranksvmtn.h"
+#include<iostream>
+#include"../tools/matrixIO.h"
using namespace std;
using namespace Eigen;
@@ -6,31 +8,44 @@ using namespace Eigen;
const int maxiter = 10;
const double prec=1e-3;
-int cg_solve(const MatrixXd &A, const VectorXd &b, const VectorXd &x)
+int cg_solve(const MatrixXd &A, const VectorXd &b, VectorXd &x)
{
double alpha,beta,r_1,r_2;
- VectorXd p = x;
+ int step=0;
VectorXd q;
- VectorXd res;
+ VectorXd res = b - A*x;
+ VectorXd p = res;
while (1)
{
- beta = r_1/r_2;
- p = res + beta*p;
+ // Non preconditioned version
+ r_1 = res.dot(res);
+ cout<<step<<":"<<r_1<<endl;
+ write_stream(cout,res);
+ if (r_1<1e-5) // Terminate condition
+ break;
+ if (step){
+ beta = r_1 / r_2;
+ p = res + p * beta;
+ }
+
q = A*p;
alpha = r_1/p.dot(q);
- // Non preconditioned version
- alpha = p.dot(p)/(p.dot(q));
- res=res-alpha*q;
- break;
+ x=x+p*alpha;
+ res=res-q*alpha;
+ write_stream(cout,p);
+ write_stream(cout,q);
+ cin.get();
+ ++step;
+ r_2=r_1;
}
return 0;
}
// Calculate objfunc gradient & support vectors
-int objfunc_linear(const VectorXd &w,const double C,const VectorXd &pred,const VectorXd &grad, double &obj,MatrixXd &sv)
+int objfunc_linear(const VectorXd &w,const MatrixXd &A,const double C,VectorXd &pred,VectorXd &grad, double &obj,MatrixXd &sv)
{
- pred = pred.cwiseMax(Matrix::Zero(pred.rows(),pred.cols()));
- obj = (pred.cwiseProduct(pred)*(C/2)) + w.transpose()*w/2;
+ pred = pred.cwiseMax(MatrixXd::Zero(pred.rows(),pred.cols()));
+// obj = (pred.cwiseProduct(pred)*(C/2)) + w.transpose()*w/2;
grad = w - (((pred*C).transpose()*A)*w).transpose();
for (int i=0;i<pred.cols();++i)
if (pred(i)>0)
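For reference, the loop the patch arrives at is the textbook unpreconditioned conjugate gradient iteration for A x = b with A symmetric positive definite: alpha = r.dot(r) / p.dot(A p) is the exact step length along the search direction p, and beta = r_new.dot(r_new) / r_old.dot(r_old) keeps successive directions A-conjugate. A minimal self-contained sketch of that iteration (the debug printing, write_stream calls and the blocking cin.get() from the patch are left out, and cg_solve_sketch is an illustrative name, not code from the repository):

#include <Eigen/Dense>
using Eigen::MatrixXd;
using Eigen::VectorXd;

// Solves A x = b for symmetric positive definite A;
// x doubles as the initial guess and the output.
int cg_solve_sketch(const MatrixXd &A, const VectorXd &b, VectorXd &x)
{
    VectorXd res = b - A * x;          // residual r = b - A x
    VectorXd p = res;                  // first search direction
    double r_1 = res.dot(res), r_2 = r_1;
    for (int step = 0; r_1 >= 1e-5; ++step)
    {
        if (step) {
            double beta = r_1 / r_2;   // new / old squared residual norm
            p = res + beta * p;        // next A-conjugate direction
        }
        VectorXd q = A * p;
        double alpha = r_1 / p.dot(q); // exact step length along p
        x += alpha * p;
        res -= alpha * q;
        r_2 = r_1;                     // remember old squared residual norm
        r_1 = res.dot(res);
    }
    return 0;
}

The hard-coded 1e-5 tolerance on the squared residual norm mirrors the patch; in practice it would usually be scaled relative to b.dot(b) rather than kept absolute.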
@@ -50,6 +65,7 @@ int RSVMTN::train(DataSet &D, Labels &label){
int iter = 0;
MatrixXd A;
+ // TODO Undefined
int n=D.rows();
LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n;
@@ -71,7 +87,7 @@ int RSVMTN::train(DataSet &D, Labels &label){
}
// Generate support vector matrix sv & gradient
- objfunc_linear(D,1,pred,grad,obj,sv);
+ objfunc_linear(D,A,1,pred,grad,obj,sv);
model.weight=model.weight+step*t;
// When dec is small enough
if (-step.dot(grad) < prec * obj)
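A quick usage sketch for the revised cg_solve signature, where x now serves as both the initial guess and the output. This is a hypothetical caller: cg_solve is file-local to ranksvmtn.cpp, and as committed it still echoes p, q and the residual to stdout and pauses on cin.get() every iteration, so the snippet illustrates the interface change only; the 2x2 system is a made-up example, not data from the repository.

#include <Eigen/Dense>
using namespace Eigen;

int cg_solve(const MatrixXd &A, const VectorXd &b, VectorXd &x); // from ranksvmtn.cpp

int main()
{
    MatrixXd A(2, 2);
    A << 4, 1,
         1, 3;                      // symmetric positive definite
    VectorXd b(2);
    b << 1, 2;
    VectorXd x = VectorXd::Zero(2); // initial guess, overwritten in place
    cg_solve(A, b, x);
    // x converges to (1/11, 7/11), the exact solution of A x = b
    return 0;
}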