#include "ranksvmtn.h" using namespace std; using namespace Eigen; const int maxiter = 10; const double prec=1e-3; // Calculate objfunc gradient & support vectors int objfunc_linear(const VectorXd &w,const double C,const VectorXd &pred,const VectorXd &grad, double &obj,MatrixXd &sv) { pred = pred.cwiseMax(Matrix::Zero(pred.rows(),pred.cols())); obj = (pred.cwiseProduct(pred)*(C/2)) + w.transpose()*w/2; grad = w - (((pred*C).transpose()*A)*w).transpose(); for (int i=0;i0) sv(i,i)=1; else sv(i,i)=0; } // line search int line_search(const VectorXd &w,const double C,const VectorXd &step,VectorXd &pred,double &t) { } int RSVMTN::train(DataSet &D, Labels &label){ int iter = 0; MatrixXd A; int n=D.rows(); LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n; MatrixXd sv=MatrixXd::Identity(n, n); VectorXd grad(fsize); VectorXd step(fsize); VectorXd pred(n); double obj,t; pred=VectorXd::Ones(n) - (A*(D*model.weight)); while (true) { iter+=1; if (iter> maxiter) { LOG(INFO)<< "Maxiter :"<