path: root/model/ranksvmtn.cpp
#include "ranksvmtn.h"

using namespace std;
using namespace Eigen;

const int maxiter = 10;
const double prec=1e-3;

// Solve A*x = b with the conjugate gradient method.
// x holds the initial guess on entry and the solution on exit.
int cg_solve(const MatrixXd &A, const VectorXd &b, VectorXd &x)
{
    double alpha,beta,r_1,r_2;
    VectorXd res = b - A*x;      // residual
    VectorXd p = res;            // search direction
    VectorXd q;
    r_2 = res.dot(res);
    while (1)
    {
        if (res.norm() < prec)   // residual small enough
            break;
        q = A*p;
        alpha = r_2/p.dot(q);
        x = x + alpha*p;
        res = res - alpha*q;
        r_1 = r_2;
        r_2 = res.dot(res);
        beta = r_2/r_1;
        p = res + beta*p;
    }
    return 0;
}
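
// Illustrative sketch (not called anywhere in the model): how cg_solve is
// intended to be used.  The small 2x2 system below is made up for the
// example only.
static void cg_solve_example()
{
    MatrixXd A(2,2);
    A << 4,1,
         1,3;
    VectorXd b(2);
    b << 1,2;
    VectorXd x = VectorXd::Zero(2);   // initial guess
    cg_solve(A,b,x);                  // on return x approximates A.inverse()*b
}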

// Calculate the objective value, its gradient and the support vector
// indicator matrix for the L2-loss (squared hinge) objective.
// AX is the pairwise comparison matrix A multiplied by the data matrix;
// pred holds 1 - A*X*w on entry.
int objfunc_linear(const VectorXd &w,const MatrixXd &AX,const double C,VectorXd &pred,VectorXd &grad, double &obj,MatrixXd &sv)
{
    // Keep only the violated pairs
    pred = pred.cwiseMax(VectorXd::Zero(pred.rows()));
    obj = pred.squaredNorm()*(C/2) + w.squaredNorm()/2;
    grad = w - AX.transpose()*(pred*C);
    // Mark the support vectors (violated pairs) on the diagonal of sv
    for (int i=0;i<pred.rows();++i)
        if (pred(i)>0)
            sv(i,i)=1;
        else
            sv(i,i)=0;
    return 0;
}
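
// The objective handled above is (presumably; this reading is inferred from
// the code, not stated anywhere in the source):
//
//     f(w) = 1/2*||w||^2 + C/2 * sum_i max(0, 1 - a_i . (X*w))^2
//
// where a_i is the i-th row of the pairwise comparison matrix A.  Its
// gradient is w - C*(A*X)^T * max(0, 1 - A*X*w), and the generalized
// Hessian is I + C*(A*X)^T * sv * (A*X), with sv the 0/1 diagonal matrix
// filled in above.  That Hessian is what cg_solve is expected to work on
// when computing the Newton step in train() below.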

// Line search along the Newton direction `step`.
// TODO: a real search is still missing; take the full Newton step for now
// so that the caller's update is at least well defined.
int line_search(const VectorXd &w,const double C,const VectorXd &step,VectorXd &pred,double &t)
{
    t = 1;
    return 0;
}
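
// One possible way to fill in the TODO above (an assumption, not the
// author's design): simple backtracking on the primal objective.  AX is the
// same composite matrix A*D used in train(); the objective evaluation is
// inlined rather than reusing objfunc_linear.
static double backtracking_step(const VectorXd &w, const MatrixXd &AX,
                                const double C, const VectorXd &step)
{
    // Objective at a candidate point v
    auto f = [&](const VectorXd &v) {
        VectorXd p = (VectorXd::Ones(AX.rows()) - AX*v).cwiseMax(VectorXd::Zero(AX.rows()));
        return v.squaredNorm()/2 + p.squaredNorm()*(C/2);
    };
    const double f0 = f(w);
    double t = 1.0;
    while (t > 1e-8 && f(w + t*step) > f0)
        t /= 2;                       // halve until the objective decreases
    return t;
}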

int RSVMTN::train(DataSet &D, Labels &label){
    int iter = 0;

    // Pairwise comparison matrix: one row per ordered pair of examples.
    // TODO: construct A from `label`; it is left unconstructed in this
    // revision, and is assumed to be n x n by the sizes used below.
    MatrixXd A;

    int n=D.rows();
    LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n;
    MatrixXd sv=MatrixXd::Identity(n, n);
    VectorXd grad(fsize);
    VectorXd step(fsize);
    VectorXd pred(n);
    double obj,t;
    const double C = 1;                // regularization trade-off

    MatrixXd AX = A*D;                 // composite design matrix

    while (true)
    {
        iter+=1;
        if (iter> maxiter)
        {
            LOG(INFO)<< "Maxiter :"<<maxiter<<" reached";
            break;
        }

        pred = VectorXd::Ones(n) - AX*model.weight;

        // Generate support vector matrix sv, objective value & gradient
        objfunc_linear(model.weight,AX,C,pred,grad,obj,sv);
        // Newton step: solve (I + C*(AX)^T*sv*(AX)) * step = -grad with CG
        MatrixXd H = MatrixXd::Identity(fsize,fsize) + C*AX.transpose()*sv*AX;
        step = VectorXd::Zero(fsize);
        cg_solve(H,-grad,step);
        line_search(model.weight,C,step,pred,t);
        model.weight=model.weight+step*t;
        // Stop when the Newton decrement is small enough
        if (-step.dot(grad) < prec * obj)
            break;
    }

    return 0;
}
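
// Sketch (an assumption, not part of the original file): for larger problems
// the Hessian H = I + C*(AX)^T*sv*(AX) does not need to be formed
// explicitly; a CG variant that only needs Hessian-vector products could use
// a helper like the one below.  AX, sv and C are the same quantities used in
// train() above.
static VectorXd hessian_vector_product(const MatrixXd &AX, const MatrixXd &sv,
                                       const double C, const VectorXd &v)
{
    // H*v = v + C*(AX)^T*(sv*(AX*v)), evaluated right-to-left so that only
    // matrix-vector products are ever formed.
    return v + C * (AX.transpose() * (sv * (AX * v)));
}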

int RSVMTN::predict(DataSet &D, Labels &res){
    res = D * model.weight;            // one score per example
    for (int i=0;i<res.rows();++i)
        res[i] = res[i] + model.beta;  // add the bias term
    return 0;
}