#include "ranksvmtn.h" using namespace std; using namespace Eigen; const int maxiter = 10; const double prec=1e-3; int cg_solve(const MatrixXd &A, const VectorXd &b, const VectorXd &x) { double alpha,beta,r_1,r_2; VectorXd p = x; VectorXd q; VectorXd res; while (1) { beta = r_1/r_2; p = res + beta*p; q = A*p; alpha = r_1/p.dot(q); // Non preconditioned version alpha = p.dot(p)/(p.dot(q)); res=res-alpha*q; break; } return 0; } // Calculate objfunc gradient & support vectors int objfunc_linear(const VectorXd &w,const double C,const VectorXd &pred,const VectorXd &grad, double &obj,MatrixXd &sv) { pred = pred.cwiseMax(Matrix::Zero(pred.rows(),pred.cols())); obj = (pred.cwiseProduct(pred)*(C/2)) + w.transpose()*w/2; grad = w - (((pred*C).transpose()*A)*w).transpose(); for (int i=0;i0) sv(i,i)=1; else sv(i,i)=0; return 0; } // line search int line_search(const VectorXd &w,const double C,const VectorXd &step,VectorXd &pred,double &t) { return 0; } int RSVMTN::train(DataSet &D, Labels &label){ int iter = 0; MatrixXd A; int n=D.rows(); LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n; MatrixXd sv=MatrixXd::Identity(n, n); VectorXd grad(fsize); VectorXd step(fsize); VectorXd pred(n); double obj,t; pred=VectorXd::Ones(n) - (A*(D*model.weight)); while (true) { iter+=1; if (iter> maxiter) { LOG(INFO)<< "Maxiter :"<