kernels.cpp
#include <cstddef>
#include <cmath>
#include <vector>

#include "feature_node.h"
#include "kernels.h"

namespace svm_learning {
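
// Squared Euclidean distance ||x - z||^2 between two sparse feature
// vectors stored as index-sorted (index, value) nodes.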
double
Kernel::euclidean_dist2(
    std::vector<FeatureNode*> x, std::vector<FeatureNode*> z
) {
    double sum = 0;
    std::size_t i = 0, j = 0;
    std::size_t x_size = x.size();
    std::size_t z_size = z.size();
    // Walk both index-sorted sparse vectors in lockstep.
    while (i < x_size && j < z_size) {
        if (x[i]->index < z[j]->index) {
            // Index present only in x: contributes (value - 0)^2.
            sum += x[i]->value * x[i]->value;
            i++;
        } else if (z[j]->index < x[i]->index) {
            // Index present only in z: contributes (0 - value)^2.
            sum += z[j]->value * z[j]->value;
            j++;
        } else {
            // Index present in both: contributes the squared difference.
            double diff = x[i]->value - z[j]->value;
            sum += diff * diff;
            i++;
            j++;
        }
    }
    // Flush whichever vector still has trailing entries.
    for (; i < x_size; i++)
        sum += x[i]->value * x[i]->value;
    for (; j < z_size; j++)
        sum += z[j]->value * z[j]->value;
    return sum;
}
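
// Sparse dot product <x, z>: only indices present in both vectors
// contribute to the sum.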
double
Kernel::dot_product(std::vector<FeatureNode*> x, std::vector<FeatureNode*> z) {
    double sum = 0;
    std::size_t i = 0, j = 0;
    std::size_t x_size = x.size();
    std::size_t z_size = z.size();
    while (i < x_size && j < z_size) {
        if (x[i]->index < z[j]->index) {
            i++;
        } else if (z[j]->index < x[i]->index) {
            j++;
        } else {
            sum += x[i]->value * z[j]->value;
            i++;
            j++;
        }
    }
    return sum;
}
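
// Linear kernel: K(x, z) = <x, z>.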
double
Kernel::k_linear(std::vector<FeatureNode*> x, std::vector<FeatureNode*> z) {
    return dot_product(x, z);
}
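
// Polynomial kernel: K(x, z) = (a * <x, z> + b)^c, with a fast path
// for the degree-1 case.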
double
Kernel::k_poly(std::vector<FeatureNode*> x, std::vector<FeatureNode*> z, double a, double b, double c) {
    if (c == 1.0)
        return a * dot_product(x, z) + b;
    return std::pow(a * dot_product(x, z) + b, c);
}
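
// Gaussian (RBF) kernel: K(x, z) = exp(-||x - z||^2 / (2 * sigma^2)).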
double
Kernel::k_gaussian(std::vector<FeatureNode*> x, std::vector<FeatureNode*> z, double sigma) {
    return std::exp(-0.5 / (sigma * sigma) * euclidean_dist2(x, z));
}
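
// Sigmoid kernel: K(x, z) = tanh(a * <x, z> + b).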
double
Kernel::k_tanh(std::vector<FeatureNode*> x, std::vector<FeatureNode*> z, double a, double b) {
    return std::tanh(a * dot_product(x, z) + b);
}
}  // namespace svm_learning
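
A minimal usage sketch, not part of the original file: it assumes FeatureNode lives in the svm_learning namespace as an aggregate with public int index and double value members (consistent with how kernels.cpp accesses it), and that the Kernel functions are static members, since feature_node.h and kernels.h are not shown here.

#include <iostream>
#include <vector>
#include "feature_node.h"
#include "kernels.h"

int main() {
    using svm_learning::FeatureNode;
    using svm_learning::Kernel;

    // Hypothetical sparse vectors; assumes FeatureNode{index, value}
    // aggregate initialization and index-sorted input.
    FeatureNode x1{1, 0.5}, x2{3, 2.0};
    FeatureNode z1{1, 1.5}, z2{2, 1.0};
    std::vector<FeatureNode*> x{&x1, &x2};
    std::vector<FeatureNode*> z{&z1, &z2};

    // Assumes static member functions (kernels.h not shown).
    std::cout << "linear:   " << Kernel::k_linear(x, z) << "\n"
              << "gaussian: " << Kernel::k_gaussian(x, z, 1.0) << "\n";
    return 0;
}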