path: root/model/ranksvmtn.cpp
blob: fe2946876c1709787bcfa0019d9912436c5ead33
#include "ranksvmtn.h"
#include <iostream>
#include "../tools/matrixIO.h"

using namespace std;
using namespace Eigen;
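
// RankSVM with a truncated Newton solver: the outer loop in RSVMTN::train
// forms the gradient of an (assumed) L2-loss pairwise objective and solves
// the Newton system approximately with the conjugate-gradient routine below.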

const int maxiter = 10;   // maximum outer (Newton) iterations
const double prec = 1e-3; // relative tolerance for the stopping test

// Solve A * x = b with plain (unpreconditioned) conjugate gradient.
// x is used as the initial guess and receives the solution; A must be
// symmetric positive definite for CG to apply.
int cg_solve(const MatrixXd &A, const VectorXd &b, VectorXd &x)
{
    double alpha, beta, r_1, r_2 = 0;
    int step = 0;
    VectorXd q;
    VectorXd res = b - A * x; // initial residual
    VectorXd p = res;         // first search direction
    while (1)
    {
        // Non-preconditioned version
        r_1 = res.dot(res);
        if (r_1 < 1e-5) // terminate on a small squared residual norm
            break;
        if (step) {
            beta = r_1 / r_2;
            p = res + p * beta; // next conjugate direction
        }

        q = A * p;
        alpha = r_1 / p.dot(q); // optimal step length along p
        x = x + p * alpha;
        res = res - q * alpha;
        ++step;
        r_2 = r_1;
    }
    return 0;
}

// Calculate objfunc gradient & support vectors
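// Assumed L2-loss (squared hinge) RankSVM objective:
//   f(w) = 1/2 * w'w + C/2 * sum_i max(0, 1 - (A w)_i)^2
// with one row of A per pairwise ordering constraint; pred carries the
// slacks 1 - (A w)_i on entry.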
int objfunc_linear(const VectorXd &w, const MatrixXd &A, const double C, VectorXd &pred, VectorXd &grad, double &obj, MatrixXd &sv)
{
    // Clip the slacks at zero; only violated constraints contribute
    pred = pred.cwiseMax(VectorXd::Zero(pred.rows()));
    obj = pred.squaredNorm() * (C / 2) + w.squaredNorm() / 2;
    grad = w - A.transpose() * (pred * C); // gradient of the objective above
    // Mark support vectors (constraints with positive slack) on the diagonal
    for (int i = 0; i < pred.rows(); ++i)
        if (pred(i) > 0)
            sv(i, i) = 1;
        else
            sv(i, i) = 0;
    return 0;
}

// line search
// TODO: exact line search along the Newton direction (the L2 loss is
// piecewise quadratic in the step length, so a closed-form minimizer exists)
int line_search(const VectorXd &w, const double C, const VectorXd &step, VectorXd &pred, double &t)
{
    t = 1; // placeholder: full Newton step, so t is never left uninitialized
    return 0;
}

int RSVMTN::train(DataSet &D, Labels &label){
    int iter = 0;

    MatrixXd A;
    // TODO: A is still undefined here; it should encode the pairwise
    // ordering constraints taken from label (one row per preference pair)

    int n=D.rows();
    LOG(INFO) << "training with feature size:" << fsize << " Data size:" << n;
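    // sv marks active (support) constraints on its diagonal; pred holds the
    // slack 1 - (A D w)_i of each pairwise constraint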
    MatrixXd sv=MatrixXd::Identity(n, n);
    VectorXd grad(fsize);
    VectorXd step(fsize);
    VectorXd pred(n);
    double obj,t;

    MatrixXd AD = A * D; // materialize pairwise features once (assumed layout)
    pred = VectorXd::Ones(n) - AD * model.weight; // initial slacks

    while (true)
    {
        iter+=1;
        if (iter> maxiter)
        {
            LOG(INFO)<< "Maxiter :"<<maxiter<<" reached";
            break;
        }

        // Recompute support vectors, gradient and objective at the current w
        objfunc_linear(model.weight, AD, 1, pred, grad, obj, sv);
        // Truncated Newton step: approximately solve
        //   (I + C * AD' * sv * AD) * step = -grad   (here C = 1)
        // with the CG solver above
        step = VectorXd::Zero(fsize);
        cg_solve(MatrixXd::Identity(fsize, fsize) + AD.transpose() * sv * AD, -grad, step);
        line_search(model.weight, 1, step, pred, t);
        model.weight = model.weight + step * t;
        pred = VectorXd::Ones(n) - AD * model.weight; // refresh the slacks
        // Stop when the Newton decrement is small relative to the objective
        if (-step.dot(grad) < prec * obj)
            break;
    }

    return 0;
}

// Score each sample: res_i = d_i . w + beta
int RSVMTN::predict(DataSet &D, Labels &res){
    res = D * model.weight; // (n x fsize) * (fsize) -> one score per sample
    for (int i = 0; i < res.size(); ++i)
        res[i] = res[i] + model.beta; // add the bias term
    return 0;
}