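// ranksvmtn.cpp -- RankSVM trained in the primal with a truncated Newton method.
// Each preference pair (A1[i], A2[i]) states that item A1[i] should rank above
// item A2[i]. With d_i = x_{A1[i]} - x_{A2[i]}, the objective is the L2-loss SVM
//
//     f(w) = 1/2*||w||^2 + C/2 * sum_i max(0, 1 - w.d_i)^2
//
// Each outer iteration forms the gradient and generalized Hessian, solves
// H*step = grad by conjugate gradient, and line-searches along the step.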
#include "ranksvmtn.h"
#include<iostream>
#include<list>
#include"../tools/matrixIO.h"
using namespace std;
using namespace Eigen;
const int maxiter = 10;
const double prec=1e-4;
const double C=1;
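// Conjugate gradient solver for the symmetric positive definite system
// A*x = b. The caller's x is used as the starting point and is overwritten
// with the solution. The Hessian passed in below is I + C*sum(v*v^T), so it
// is always SPD and CG applies.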
int cg_solve(const MatrixXd &A, const VectorXd &b, VectorXd &x)
{
    double alpha, beta, r_1, r_2;
    int step = 0;
    VectorXd q;
    VectorXd res = b - A*x; // TODO Hessian product
    VectorXd p = res;
    while (1)
    {
        // Non-preconditioned version
        r_1 = res.dot(res);
        if (r_1 < 1e-10) // converged: squared residual norm is tiny
            break;
        if (step > b.rows()) // safeguard: exact CG needs at most n steps
            break;
        if (step) {
            beta = r_1 / r_2;
            p = res + p*beta;
        }
        q = A*p; // TODO Hessian product
        alpha = r_1 / p.dot(q);
        x = x + p*alpha;
        res = res - q*alpha;
        ++step;
        r_2 = r_1;
    }
    return 0;
}
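// Example (illustrative values only):
//   MatrixXd A(2,2); A << 4, 1, 1, 3;
//   VectorXd b(2);   b << 1, 2;
//   VectorXd x = VectorXd::Zero(2);
//   cg_solve(A, b, x);  // x is now approximately (1/11, 7/11)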
// Calculate objective value, gradient, and the contribution of support pairs.
// `pred` holds the pairwise margins 1 - w.d_i and is left untouched so the
// caller can keep updating it incrementally; only its positive part enters
// the objective and gradient.
int objfunc_linear(const VectorXd &w, const MatrixXd &D, const vector<int> &A1, const vector<int> &A2, const double C, const VectorXd &pred, VectorXd &grad, double &obj)
{
    VectorXd hinge = pred.cwiseMax(0.0); // max(0, 1 - w.d_i), without clobbering pred
    obj = (hinge.cwiseProduct(hinge)*C).sum()/2 + w.dot(w)/2;
    VectorXd pA = VectorXd::Zero(D.rows());
    for (int i = 0; i < (int)A1.size(); ++i) {
        pA(A1[i]) += hinge(i);
        pA(A2[i]) -= hinge(i);
    }
    // grad = w - C * sum_i hinge_i * d_i, accumulated through pA so only one
    // pass over the data matrix is needed
    grad = w - C*(pA.transpose()*D).transpose();
    return 0;
}
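// Along the search direction, phi(t) = f(w + t*step) is piecewise quadratic:
//   phi'(t)  = w.step + t*step.step - C*sum_{active} (pred_i - t*Xd_i)*Xd_i
//   phi''(t) = step.step + C*sum_{active} Xd_i^2
// where Xd_i = step.d_i and a pair is active while its slack pred_i - t*Xd_i
// stays positive. Newton's method on phi converges in a few iterations.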
// Line search along `step` using Newton's method on phi(t) = f(w + t*step)
int line_search(const VectorXd &w, const MatrixXd &D, const vector<int> &A1, const vector<int> &A2, const VectorXd &step, VectorXd &pred, double &t)
{
    double wd = w.dot(step), dd = step.dot(step);
    VectorXd Dd = D*step;
    VectorXd Xd = VectorXd::Zero(A1.size());
    for (int i = 0; i < (int)A1.size(); ++i)
        Xd(i) = Dd(A1[i]) - Dd(A2[i]); // step.d_i for each pair
    double g, h;
    t = 0;
    VectorXd pred2;
    while (1)
    {
        pred2 = pred - t*Xd; // pairwise margins at the current step length
        g = wd + t*dd;       // phi'(t), regularizer part
        h = dd;              // phi''(t), regularizer part
        for (int i = 0; i < pred2.rows(); ++i)
            if (pred2(i) > 0) { // only active pairs contribute to the loss
                g -= C*pred2(i)*Xd(i);
                h += C*Xd(i)*Xd(i);
            }
        g += 1e-12; // numerical safeguards against a
        h += 1e-12; // vanishing curvature / division by zero
        t = t - g/h;
        if (g*g/h < 1e-10) // Newton decrement small enough
            break;
    }
    pred = pred - t*Xd; // margins at the accepted step length
    return 0;
}
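// Outer truncated-Newton loop: repeatedly build the gradient and Hessian at
// the current weights, solve for the Newton direction with CG, update the
// weights by an exact line search, and stop once the Newton decrement is
// small relative to the objective.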
int train_orig(int fsize, MatrixXd &D, vector<int> &A1, vector<int> &A2, VectorXd &weight)
{
    int iter = 0;
    long n = A1.size();
    LOG(INFO) << "training with feature size:" << fsize << " data size:" << D.rows() << " relation size:" << n;
    VectorXd grad(fsize);
    VectorXd step(fsize);
    VectorXd pred(n);
    double obj, t;
    // initial pairwise margins: pred_i = 1 - w.(x_{A1[i]} - x_{A2[i]})
    VectorXd dw = D*weight;
    pred = VectorXd::Zero(n);
    for (int i = 0; i < n; ++i)
        pred(i) = 1 - dw(A1[i]) + dw(A2[i]);
    while (true)
    {
        iter += 1;
        if (iter > maxiter)
        {
            LOG(INFO) << "Maxiter :" << maxiter << " reached";
            break;
        }
        // Generate support vector set & gradient
        objfunc_linear(weight, D, A1, A2, C, pred, grad, obj);
        step.setZero();
        // Compute the generalized Hessian directly: H = I + C*sum_{active} d_i*d_i^T
        MatrixXd H = MatrixXd::Identity(grad.rows(), grad.rows());
        for (int i = 0; i < n; ++i)
            if (pred(i) > 0) {
                VectorXd v = (D.row(A1[i]) - D.row(A2[i])).transpose();
                H = H + C * (v * v.transpose());
            }
        // Solve H*step = grad with conjugate gradient
        cg_solve(H, grad, step);
        // Line search along the Newton direction
        line_search(weight, D, A1, A2, step, pred, t);
        weight = weight + step*t;
        int sv = 0;
        for (int i = 0; i < n; ++i)
            if (pred(i) > 0)
                ++sv;
        LOG(INFO) << "Iter: " << iter << " Obj: " << obj << " SV: " << sv << " Newton decr:" << step.dot(grad)/2 << " linesearch: " << -t;
        // Stop when the Newton decrement is small relative to the objective
        if (step.dot(grad) < prec * obj)
            break;
    }
    return 0;
}
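// Build the feature matrix and the preference pairs from the input list.
// Within each query block (consecutive items sharing a qid), every item with
// rank > 0 is paired against every item with rank < 0; this assumes the items
// of a query are ordered with the positively ranked ones first.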
int RSVMTN::train(DataList &D)
{
    MatrixXd Data(D.getSize(), D.getfSize());
    vector<int> A1, A2;
    int i, j;
    LOG(INFO) << "Processing input";
    for (i = 0; i < D.getSize(); ++i) {
        for (j = 0; j < D.getfSize(); ++j)
            Data(i, j) = (D.getData()[i])->feature(j);
    }
    // First pass: count the preference pairs so A1/A2 can be reserved
    int cnt = 0;
    i = j = 0;
    while (i < D.getSize())
    {
        if ((i+1 == D.getSize()) || D.getData()[i]->qid != D.getData()[i+1]->qid)
        {
            // query block spans indices j..i; positives (rank > 0) come first
            int high = j;
            while (high <= i && D.getData()[high]->rank > 0)
                ++high;
            cnt += (high-j)*(i-high+1);
            j = i+1;
        }
        ++i;
    }
    A1.reserve(cnt);
    A2.reserve(cnt);
    // Second pass: emit one pair per (positive, negative) combination
    i = j = 0;
    while (i < D.getSize())
    {
        if ((i+1 == D.getSize()) || D.getData()[i]->qid != D.getData()[i+1]->qid)
        {
            int v1, v2;
            for (v1 = j; v1 <= i && (D.getData()[v1]->rank) > 0; ++v1)
                for (v2 = i; v2 >= j && (D.getData()[v2]->rank) < 0; --v2) {
                    A1.push_back(v1);
                    A2.push_back(v2);
                }
            j = i+1;
        }
        ++i;
    }
    train_orig(fsize, Data, A1, A2, model.weight);
    return 0;
}
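// Score each item by the inner product with the learned weights;
// a higher score means the item should be ranked higher.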
int RSVMTN::predict(DataList &D, vector<double> &res)
{
    res.clear();
    for (int i = 0; i < D.getSize(); ++i)
        res.push_back(((D.getData()[i])->feature).dot(model.weight));
    return 0;
}