summaryrefslogtreecommitdiff
path: root/model/ranksvmtn.cpp
blob: 6a7057dafcf431450cc13954a746885ac4198f88 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
#include "ranksvmtn.h"

using namespace std;
using namespace Eigen;

const int maxiter = 10;
const double prec=1e-3;

// Calculate objfunc gradient & support vectors
int objfunc_linear(const VectorXd &w,const double C,const VectorXd &pred,const VectorXd &grad, double &obj,MatrixXd &sv)
{
    pred = pred.cwiseMax(Matrix::Zero(pred.rows(),pred.cols()));
    obj = (pred.cwiseProduct(pred)*(C/2)) + w.transpose()*w/2;
    grad = w - (((pred*C).transpose()*A)*w).transpose();
    for (int i=0;i<pred.cols();++i)
        if (pred(i)>0)
            sv(i,i)=1;
        else
            sv(i,i)=0;
}

// line search
int line_search(const VectorXd &w,const double C,const VectorXd &step,VectorXd &pred,double &t)
{

}

// Train the linear ranking SVM with (truncated-Newton style) iterations.
// D     : n x fsize data matrix
// label : preference/rank labels — NOTE(review): never read below; the
//         pairwise structure it should induce is missing. TODO confirm.
// Returns 0.
int RSVMTN::train(DataSet &D, Labels &label){
    int iter = 0;

    // NOTE(review): A is never initialized — presumably the pairwise
    // order/difference matrix derived from `label`. As written it is 0x0,
    // so the product on the `pred` line below is empty. TODO confirm the
    // intended construction.
    MatrixXd A;

    int n=D.rows();
    LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n;
    // sv: diagonal support-vector indicator, updated by objfunc_linear.
    MatrixXd sv=MatrixXd::Identity(n, n);
    VectorXd grad(fsize);
    // NOTE(review): `step` (the Newton direction) is never computed anywhere
    // in this loop, and `t` is only meant to come from line_search, which is
    // never called — both are read uninitialized below (UB).
    VectorXd step(fsize);
    VectorXd pred(n);
    double obj,t;

    // Slack values: 1 - A*D*w for every preference pair.
    pred=VectorXd::Ones(n) - (A*(D*model.weight));

    while (true)
    {
        iter+=1;
        if (iter> maxiter)
        {
            LOG(INFO)<< "Maxiter :"<<maxiter<<" reached";
            break;
        }

        // Generate support vector matrix sv & gradient
        // NOTE(review): first argument should be the weight vector
        // (model.weight) per objfunc_linear's signature, not the data
        // matrix D; as written this does not type-check. C is hard-coded 1.
        objfunc_linear(D,1,pred,grad,obj,sv);
        // NOTE(review): weight is updated with step*t before either has been
        // assigned this iteration; the line_search call that should set t
        // (and the CG solve that should set step) are missing.
        model.weight=model.weight+step*t;
        // When dec is small enough
        // Convergence test: predicted objective decrease below prec * obj.
        if (-step.dot(grad) < prec * obj)
            break;
    }

    return 0;
};

// Score every example in D with the trained linear model: res = D*w + beta.
// D   : n x fsize data matrix
// res : out: n per-example scores
// Returns 0.
int RSVMTN::predict(DataSet &D, Labels &res){
    // Original computed model.weight * D, which is dimensionally invalid for
    // an (fsize x 1) weight against an (n x fsize) data matrix; the intended
    // score vector is D * weight. Assumes DataSet/Labels are Eigen
    // matrix/vector typedefs — TODO confirm against ranksvmtn.h.
    res = D * model.weight;
    // res is a column vector of n scores: iterate rows (cols() would be 1,
    // so the original loop touched only one element).
    for (int i=0;i<res.rows();++i)
        res[i] = res[i] + model.beta;
    return 0;
};