SHOGUN  4.1.0
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
GaussianARDKernel.cpp
Go to the documentation of this file.
1 /*
2  * This program is free software; you can redistribute it and/or modify
3  * it under the terms of the GNU General Public License as published by
4  * the Free Software Foundation; either version 3 of the License, or
5  * (at your option) any later version.
6  *
7  * Written (W) 2015 Wu Lin
8  * Written (W) 2012 Jacob Walker
9  *
10  * Adapted from WeightedDegreeRBFKernel.cpp
11  */
12 
15 
16 #ifdef HAVE_LINALG_LIB
18 #endif
19 
20 using namespace shogun;
21 
// Default-constructor body (the signature line was stripped by the doc
// extractor): installs the default width and registers parameters via
// initialize().
23 {
24  initialize();
25 }
26 
// Destructor body (signature line stripped by the doc extractor): empty —
// nothing to release beyond what members' own destructors handle.
28 {
29 }
30 
// Shared constructor helper: sets the default kernel width and registers
// m_width with Shogun's parameter framework so it is serializable,
// model-selectable (MS_AVAILABLE) and usable for gradient-based
// hyper-parameter optimization (GRADIENT_AVAILABLE).
31 void CGaussianARDKernel::initialize()
32 {
// Default width of 1.0; constructors taking an explicit width override this
33  set_width(1.0);
34  SG_ADD(&m_width, "width", "Kernel width", MS_AVAILABLE, GRADIENT_AVAILABLE);
35 }
36 
37 #ifdef HAVE_LINALG_LIB
// Constructor taking cache size and width (first signature line stripped by
// the doc extractor): forwards the cache size to CLinearARDKernel, then
// applies defaults and the requested width.
39  : CLinearARDKernel(size)
40 {
41  initialize();
42  set_width(width);
43 }
44 
// Constructor taking left/right features, cache size and width (first
// signature line stripped by the doc extractor).
// NOTE(review): the visible body never uses the feature arguments (no
// init(l,r) call; the interior line numbers 49-51 are contiguous, so nothing
// was stripped from the body) — confirm this is intended.
46  CDotFeatures* r, int32_t size, float64_t width)
47  : CLinearARDKernel(size)
48 {
49  initialize();
50  set_width(width);
51 }
52 
53 bool CGaussianARDKernel::init(CFeatures* l, CFeatures* r)
54 {
55  return CLinearARDKernel::init(l,r);
56 }
57 
// Checked downcast helper (signature line stripped by the doc extractor;
// returns CGaussianARDKernel* per the cast below). Fails hard via SG_SERROR
// when the supplied kernel is not of type K_GAUSSIANARD.
59 {
60  if (kernel->get_kernel_type()!=K_GAUSSIANARD)
61  {
62  SG_SERROR("Provided kernel is not of type CGaussianARDKernel!\n");
63  }
64 
65  /* since an additional reference is returned */
66  SG_REF(kernel);
67  return (CGaussianARDKernel*)kernel;
68 }
69 
70 float64_t CGaussianARDKernel::compute(int32_t idx_a, int32_t idx_b)
71 {
72  float64_t result=distance(idx_a,idx_b);
73  return CMath::exp(-result);
74 }
75 
// Per-example derivative of the diagonal kernel entries w.r.t. one
// hyper-parameter (the signature line was stripped by the doc extractor;
// the visible parameter list follows). Returns one derivative per (j,j)
// kernel entry.
77  const TParameter* param, index_t index)
78 {
79  REQUIRE(param, "Param not set\n");
80  REQUIRE(lhs , "Left features not set!\n");
81  REQUIRE(rhs, "Right features not set!\n");
82 
// Same feature object on both sides: each diagonal entry is
// k(x,x)=exp(-0)=1 (see distance()), independent of weights and width,
// so the derivative is identically zero.
83  if (lhs==rhs)
84  {
85  if (!strcmp(param->m_name, "weights") || !strcmp(param->m_name, "width"))
86  {
87  SGVector<float64_t> derivative(num_lhs);
88  derivative.zero();
89  return derivative;
90  }
91  }
92  else
93  {
// Distinct feature objects: iterate the (j,j) pairs up to the shorter
// side and delegate each entry to the shared per-pair helper.
94  int32_t length=CMath::min(num_lhs, num_rhs);
95  SGVector<float64_t> derivative(length);
96 
97  for (index_t j=0; j<length; j++)
98  {
99  if (!strcmp(param->m_name, "weights") )
100  {
// The weights gradient needs the raw feature vectors of the pair
101  SGVector<float64_t> avec=((CDotFeatures *)lhs)->get_computed_dot_feature_vector(j);
102  SGVector<float64_t> bvec=((CDotFeatures *)rhs)->get_computed_dot_feature_vector(j);
103  derivative[j]=get_parameter_gradient_helper(param,index,j,j,avec,bvec);
104  }
105  else if (!strcmp(param->m_name, "width"))
106  {
// The width gradient only uses the cached kernel value, so empty
// vectors are passed to the helper.
107  SGVector<float64_t> avec, bvec;
108  derivative[j]=get_parameter_gradient_helper(param,index,j,j,avec,bvec);
109  }
110  }
// NOTE(review): for an unrecognized parameter name with lhs!=rhs, the loop
// above writes nothing and this returns an uninitialized vector; the
// SG_ERROR below is only reachable when lhs==rhs — confirm intended.
111  return derivative;
112  }
113 
114  SG_ERROR("Can't compute derivative wrt %s parameter\n", param->m_name);
115  return SGVector<float64_t>();
116 }
117 
118 
119 float64_t CGaussianARDKernel::get_parameter_gradient_helper(
120  const TParameter* param, index_t index, int32_t idx_a,
121  int32_t idx_b, SGVector<float64_t> avec, SGVector<float64_t> bvec)
122 {
123  REQUIRE(param, "Param not set\n");
124 
125  if (!strcmp(param->m_name, "weights"))
126  {
127  bvec=linalg::add(avec, bvec, 1.0, -1.0);
128  float64_t scale=-kernel(idx_a,idx_b)/m_width;
129  return compute_gradient_helper(bvec, bvec, scale, index);
130  }
131  else if (!strcmp(param->m_name, "width"))
132  {
133  float64_t tmp=kernel(idx_a,idx_b);
134  return -tmp*CMath::log(tmp)/m_width;
135  }
136  else
137  {
138  SG_ERROR("Can't compute derivative wrt %s parameter\n", param->m_name);
139  return 0.0;
140  }
141 }
142 
// Gradient of the full kernel matrix w.r.t. one hyper-parameter (the
// signature line was stripped by the doc extractor; returns
// SGMatrix<float64_t> per the return statements below). Entry (j,k) is
// d K(j,k) / d param, computed via the shared per-pair helper.
144  const TParameter* param, index_t index)
145 {
146  REQUIRE(param, "Param not set\n");
147  REQUIRE(lhs , "Left features not set!\n");
148  REQUIRE(rhs, "Right features not set!\n");
149 
// "weights" needs the actual feature pair for every entry; the left
// vector is hoisted out of the inner loop per row.
150  if (!strcmp(param->m_name, "weights"))
151  {
152  SGMatrix<float64_t> derivative(num_lhs, num_rhs);
153  for (index_t j=0; j<num_lhs; j++)
154  {
155  SGVector<float64_t> avec=((CDotFeatures *)lhs)->get_computed_dot_feature_vector(j);
156  for (index_t k=0; k<num_rhs; k++)
157  {
158  SGVector<float64_t> bvec=((CDotFeatures *)rhs)->get_computed_dot_feature_vector(k);
159  derivative(j,k)=get_parameter_gradient_helper(param,index,j,k,avec,bvec);
160  }
161  }
162  return derivative;
163  }
// "width" only uses the cached kernel value inside the helper, so empty
// feature vectors are sufficient here.
164  else if (!strcmp(param->m_name, "width"))
165  {
166  SGMatrix<float64_t> derivative(num_lhs, num_rhs);
167 
168  for (index_t j=0; j<num_lhs; j++)
169  {
170  for (index_t k=0; k<num_rhs; k++)
171  {
172  SGVector<float64_t> avec, bvec;
173  derivative(j,k)=get_parameter_gradient_helper(param,index,j,k,avec,bvec);
174  }
175  }
176  return derivative;
177  }
// Unknown parameter name: report the error and return an empty matrix.
178  else
179  {
180  SG_ERROR("Can't compute derivative wrt %s parameter\n", param->m_name);
181  return SGMatrix<float64_t>();
182  }
183 }
184 
185 float64_t CGaussianARDKernel::distance(int32_t idx_a, int32_t idx_b)
186 {
187  REQUIRE(lhs, "Left features (lhs) not set!\n")
188  REQUIRE(rhs, "Right features (rhs) not set!\n")
189 
190  if (lhs==rhs && idx_a==idx_b)
191  return 0.0;
192 
193  SGVector<float64_t> avec=((CDotFeatures *)lhs)->get_computed_dot_feature_vector(idx_a);
194  SGVector<float64_t> bvec=((CDotFeatures *)rhs)->get_computed_dot_feature_vector(idx_b);
195  avec=linalg::add(avec, bvec, 1.0, -1.0);
196  float64_t result=compute_helper(avec, avec);
197  return result/m_width;
198 }
199 #endif /* HAVE_LINALG_LIB */

SHOGUN Machine Learning Toolbox - Documentation