KNN.cpp (SHOGUN 5.0.0)
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * Written (W) 2006 Christian Gehl
 * Written (W) 2006-2009 Soeren Sonnenburg
 * Written (W) 2011 Sergey Lisitsyn
 * Written (W) 2012 Fernando José Iglesias García, cover tree support
 * Copyright (C) 2011 Berlin Institute of Technology and Max-Planck-Society
 */

#include <shogun/multiclass/KNN.h>
#include <shogun/labels/Labels.h>
#include <shogun/labels/MulticlassLabels.h>
#include <shogun/mathematics/Math.h>
#include <shogun/lib/Signal.h>
#include <shogun/lib/JLCoverTree.h>
#include <shogun/lib/Time.h>
#include <shogun/base/Parameter.h>
#include <shogun/multiclass/tree/KDTree.h>
#include <shogun/mathematics/eigen3.h>

#ifdef HAVE_CXX11
#include <shogun/lib/external/falconn/lsh_nn_table.h>
#endif

//#define DEBUG_KNN

using namespace shogun;
using namespace Eigen;

CKNN::CKNN()
: CDistanceMachine()
{
    init();
}

CKNN::CKNN(int32_t k, CDistance* d, CLabels* trainlab, KNN_SOLVER knn_solver)
: CDistanceMachine()
{
    init();

    m_k=k;

    ASSERT(d)
    ASSERT(trainlab)

    set_distance(d);
    set_labels(trainlab);
    m_knn_solver=knn_solver;
}
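
/* Usage sketch (illustrative, not part of the original file): constructing a
 * 3-NN machine from dense features and multiclass labels, assuming feats and
 * labels were created elsewhere; CEuclideanDistance is just one possible
 * CDistance implementation.
 *
 *   CDenseFeatures<float64_t>* feats = ...; // training features
 *   CMulticlassLabels* labels = ...;        // one label per training vector
 *   CDistance* dist = new CEuclideanDistance(feats, feats);
 *   CKNN* knn = new CKNN(3, dist, labels, KNN_BRUTE);
 */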

void CKNN::init()
{
    /* do not store model features by default (CDistanceMachine::apply(...) is
     * overwritten) */
    set_store_model_features(false);

    m_k=3;
    m_q=1.0;
    m_num_classes=0;
    m_leaf_size=1;
    m_knn_solver=KNN_BRUTE;
#ifdef HAVE_CXX11
    m_lsh_l = 0;
    m_lsh_t = 0;
#endif

    /* use the method classify_for_multiple_k to experiment with different
     * values of k */
    SG_ADD(&m_k, "m_k", "Parameter k", MS_NOT_AVAILABLE);
    SG_ADD(&m_q, "m_q", "Parameter q", MS_AVAILABLE);
    SG_ADD(&m_num_classes, "m_num_classes", "Number of classes", MS_NOT_AVAILABLE);
    SG_ADD(&m_leaf_size, "m_leaf_size", "Leaf size for KDTree", MS_NOT_AVAILABLE);
    SG_ADD((machine_int_t*) &m_knn_solver, "m_knn_solver", "Algorithm to solve knn", MS_NOT_AVAILABLE);
}

CKNN::~CKNN()
{
}

bool CKNN::train_machine(CFeatures* data)
{
    ASSERT(m_labels)
    ASSERT(distance)

    if (data)
    {
        if (m_labels->get_num_labels() != data->get_num_vectors())
            SG_ERROR("Number of training vectors does not match number of labels\n")
        distance->init(data, data);
    }

    SGVector<int32_t> lab=((CMulticlassLabels*) m_labels)->get_int_labels();
    m_train_labels=lab.clone();
    ASSERT(m_train_labels.vlen>0)

    int32_t max_class=m_train_labels[0];
    int32_t min_class=m_train_labels[0];

    for (int32_t i=1; i<m_train_labels.vlen; i++)
    {
        max_class=CMath::max(max_class, m_train_labels[i]);
        min_class=CMath::min(min_class, m_train_labels[i]);
    }

    //shift the stored labels so they are contiguous in [0, num_classes-1]
    for (int32_t i=0; i<m_train_labels.vlen; i++)
        m_train_labels[i]-=min_class;

    m_min_label=min_class;
    m_num_classes=max_class-min_class+1;

    SG_INFO("m_num_classes: %d (%+d to %+d) num_train: %d\n", m_num_classes,
            min_class, max_class, m_train_labels.vlen);

    return true;
}
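
/* Worked example of the label bookkeeping above (illustrative): integer
 * training labels {3, 7, 5, 3} give min_class=3 and max_class=7, so
 * m_min_label=3, m_num_classes=5, and m_train_labels stores the shifted
 * values {0, 4, 2, 0}; every classify method adds m_min_label back before
 * writing its output. */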

SGMatrix<index_t> CKNN::nearest_neighbors()
{
    //number of examples to which kNN is applied
    int32_t n=distance->get_num_vec_rhs();
    //distances to train data
    float64_t* dists=SG_MALLOC(float64_t, m_train_labels.vlen);
    //indices to train data
    index_t* train_idxs=SG_MALLOC(index_t, m_train_labels.vlen);
    //pre-allocation of the nearest neighbors
    SGMatrix<index_t> NN(m_k, n);

    distance->precompute_lhs();
    distance->precompute_rhs();

    //for each test example
    for (int32_t i=0; i<n && (!CSignal::cancel_computations()); i++)
    {
        SG_PROGRESS(i, 0, n)

        //lhs idx 0..num train examples-1 (i.e., all train examples) and rhs idx i
        distances_lhs(dists,0,m_train_labels.vlen-1,i);

        //fill in an array with 0..num train examples-1
        for (int32_t j=0; j<m_train_labels.vlen; j++)
            train_idxs[j]=j;

        //sort the distance vector between test example i and all train examples
        CMath::qsort_index(dists, train_idxs, m_train_labels.vlen);

#ifdef DEBUG_KNN
        SG_PRINT("\nQuick sort query %d\n", i)
        for (int32_t j=0; j<m_k; j++)
            SG_PRINT("%d ", train_idxs[j])
        SG_PRINT("\n")
#endif

        //fill the output with the indices of the nearest neighbors
        for (int32_t j=0; j<m_k; j++)
            NN(j,i) = train_idxs[j];
    }

    distance->reset_precompute();

    SG_FREE(train_idxs);
    SG_FREE(dists);

    return NN;
}
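
/* Shape of the result (illustrative): NN has m_k rows and one column per test
 * vector; column i holds the training-set indices of the k nearest neighbors
 * of test vector i, sorted by increasing distance.
 *
 *   SGMatrix<index_t> NN = knn->nearest_neighbors();
 *   index_t nearest = NN(0, 0); // closest training vector to test vector 0
 */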

CMulticlassLabels* CKNN::apply_multiclass(CFeatures* data)
{
    if (data)
        init_distance(data);

    //redirect to the fast (sort-free) classifier when k==1
    if (m_k == 1)
        return classify_NN();

    ASSERT(m_num_classes>0)
    ASSERT(distance)
    ASSERT(distance->get_num_vec_rhs())

    int32_t num_lab=distance->get_num_vec_rhs();
    ASSERT(m_k<=distance->get_num_vec_lhs())

    CMulticlassLabels* output=new CMulticlassLabels(num_lab);

    //labels of the k nearest neighbors
    int32_t* train_lab=SG_MALLOC(int32_t, m_k);

    SG_INFO("%d test examples\n", num_lab)
    CSignal::clear_cancel();

    //histogram of classes and returned output
    float64_t* classes=SG_MALLOC(float64_t, m_num_classes);

    switch (m_knn_solver)
    {
    case KNN_BRUTE:
    {
        //get the k nearest neighbors of each example
        SGMatrix<index_t> NN = nearest_neighbors();

        //from the indices to the nearest neighbors, compute the class labels
        for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
        {
            //write the labels of the k nearest neighbors from their indices
            for (int32_t j=0; j<m_k; j++)
                train_lab[j] = m_train_labels[ NN(j,i) ];

            //get the index of the 'nearest' class
            int32_t out_idx = choose_class(classes, train_lab);
            //write the label of 'nearest' in the output
            output->set_label(i, out_idx + m_min_label);
        }

        break;
    }
    case KNN_COVER_TREE: // Use cover tree
    {
        // m_q != 1.0 is not supported with the cover tree because the
        // neighbors are not retrieved in increasing order of distance
        // to the query
        float64_t old_q = m_q;
        if ( old_q != 1.0 )
        {
            SG_INFO("q != 1.0 not supported with cover tree, using q = 1\n")
            m_q = 1.0; // fall back to unweighted voting; restored below
        }

        // From the sets of features (lhs and rhs) stored in distance,
        // build arrays of cover tree points
        v_array< CJLCoverTreePoint > set_of_points =
            parse_points(distance, FC_LHS);
        v_array< CJLCoverTreePoint > set_of_queries =
            parse_points(distance, FC_RHS);

        // Build the cover trees, one for the test vectors (rhs features)
        // and another for the training vectors (lhs features)
        CFeatures* r = distance->replace_rhs( distance->get_lhs() );
        node< CJLCoverTreePoint > top = batch_create(set_of_points);
        CFeatures* l = distance->replace_lhs(r);
        distance->replace_rhs(r);
        node< CJLCoverTreePoint > top_query = batch_create(set_of_queries);

        // Get the k nearest neighbors to all the test vectors (batch method)
        distance->replace_lhs(l);
        v_array< v_array< CJLCoverTreePoint > > res;
        k_nearest_neighbor(top, top_query, res, m_k);

#ifdef DEBUG_KNN
        SG_PRINT("\nJL Results:\n")
        for ( int32_t i = 0 ; i < res.index ; ++i )
        {
            for ( int32_t j = 0 ; j < res[i].index ; ++j )
            {
                printf("%d ", res[i][j].m_index);
            }
            printf("\n");
        }
        SG_PRINT("\n")
#endif

        for ( int32_t i = 0 ; i < res.index ; ++i )
        {
            // Translate from indices to labels of the nearest neighbors
            for ( int32_t j = 0; j < m_k; ++j )
                // The first index in res[i] points to the test vector
                // itself, so its neighbors start at position 1
                train_lab[j] = m_train_labels.vector[ res[i][j+1].m_index ];

            // Get the index of the 'nearest' class
            int32_t out_idx = choose_class(classes, train_lab);
            output->set_label(res[i][0].m_index, out_idx+m_min_label);
        }

        m_q = old_q;

        break;
    }
    case KNN_KDTREE:
    {
        CFeatures* lhs = distance->get_lhs();
        CKDTree* kd_tree = new CKDTree(m_leaf_size);
        kd_tree->build_tree(dynamic_cast<CDenseFeatures<float64_t>*>(lhs));
        SG_UNREF(lhs);

        CFeatures* query = distance->get_rhs();
        kd_tree->query_knn(dynamic_cast<CDenseFeatures<float64_t>*>(query), m_k);
        SGMatrix<index_t> NN = kd_tree->get_knn_indices();
        for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
        {
            //write the labels of the k nearest neighbors from their indices
            for (int32_t j=0; j<m_k; j++)
                train_lab[j] = m_train_labels[ NN(j,i) ];

            //get the index of the 'nearest' class
            int32_t out_idx = choose_class(classes, train_lab);
            //write the label of 'nearest' in the output
            output->set_label(i, out_idx + m_min_label);
        }
        SG_UNREF(query);
        SG_UNREF(kd_tree);
        break;
    }
#ifdef HAVE_CXX11
    case KNN_LSH:
    {
        CDenseFeatures<float64_t>* features =
            dynamic_cast<CDenseFeatures<float64_t>*>(distance->get_lhs());
        std::vector<falconn::DenseVector<double>> feats;
        for(int32_t i=0; i < features->get_num_vectors(); i++)
        {
            int32_t len;
            bool free;
            float64_t* vec = features->get_feature_vector(i, len, free);
            //the Map is copied into temp, so the buffer can be released
            falconn::DenseVector<double> temp = Map<VectorXd> (vec, len);
            feats.push_back(temp);
            features->free_feature_vector(vec, i, free);
        }

        falconn::LSHConstructionParameters params
            = falconn::get_default_parameters<falconn::DenseVector<double>>(features->get_num_vectors(),
                        features->get_num_features(),
                        falconn::DistanceFunction::EuclideanSquared,
                        true);
        SG_UNREF(features);
        if (m_lsh_l && m_lsh_t)
            params.l = m_lsh_l;

        auto lsh_table = falconn::construct_table<falconn::DenseVector<double>>(feats, params);
        if (m_lsh_t)
            lsh_table->set_num_probes(m_lsh_t);

        CDenseFeatures<float64_t>* query_features =
            dynamic_cast<CDenseFeatures<float64_t>*>(distance->get_rhs());

        SGMatrix<index_t> NN (m_k, query_features->get_num_vectors());
        std::vector<int32_t> indices;
        for(int32_t i=0; i < query_features->get_num_vectors(); i++)
        {
            int32_t len;
            bool free;
            float64_t* vec = query_features->get_feature_vector(i, len, free);
            falconn::DenseVector<double> temp = Map<VectorXd> (vec, len);
            lsh_table->find_k_nearest_neighbors(temp, (int_fast64_t)m_k, &indices);
            memcpy(NN.get_column_vector(i), indices.data(), sizeof(int32_t)*m_k);
            query_features->free_feature_vector(vec, i, free);
        }

        for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
        {
            //write the labels of the k nearest neighbors from their indices
            for (int32_t j=0; j<m_k; j++)
                train_lab[j] = m_train_labels[ NN(j,i) ];

            //get the index of the 'nearest' class
            int32_t out_idx = choose_class(classes, train_lab);
            //write the label of 'nearest' in the output
            output->set_label(i, out_idx + m_min_label);
        }
        SG_UNREF(query_features);
        break;
    }
#endif /* HAVE_CXX11 */
    }

    SG_FREE(classes);
    SG_FREE(train_lab);

    return output;
}
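
/* Usage sketch (illustrative): applying a trained machine to query features,
 * assuming knn was built as in the constructor example above and test_feats
 * is a CDenseFeatures<float64_t>* created elsewhere.
 *
 *   knn->train();
 *   CMulticlassLabels* pred = knn->apply_multiclass(test_feats);
 *   float64_t y0 = pred->get_label(0); // predicted class of test vector 0
 *   SG_UNREF(pred);
 */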

CMulticlassLabels* CKNN::classify_NN()
{
    ASSERT(distance)
    ASSERT(m_num_classes>0)

    int32_t num_lab = distance->get_num_vec_rhs();
    ASSERT(num_lab)

    CMulticlassLabels* output = new CMulticlassLabels(num_lab);
    float64_t* distances = SG_MALLOC(float64_t, m_train_labels.vlen);

    SG_INFO("%d test examples\n", num_lab)
    CSignal::clear_cancel();

    distance->precompute_lhs();

    // for each test example
    for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
    {
        SG_PROGRESS(i,0,num_lab)

        // get distances from the i-th test example to all train examples
        distances_lhs(distances,0,m_train_labels.vlen-1,i);
        int32_t j;

        // assume the 0th train example is the nearest to the i-th test example
        int32_t out_idx = 0;
        float64_t min_dist = distances[0];

        // search for the nearest neighbor by comparing distances
        for (j=0; j<m_train_labels.vlen; j++)
        {
            if (distances[j]<min_dist)
            {
                min_dist = distances[j];
                out_idx = j;
            }
        }

        // label the i-th test example with the label of its nearest neighbor
        output->set_label(i,m_train_labels.vector[out_idx]+m_min_label);
    }

    distance->reset_precompute();

    SG_FREE(distances);
    return output;
}

SGMatrix<int32_t> CKNN::classify_for_multiple_k()
{
    ASSERT(distance)
    ASSERT(m_num_classes>0)

    int32_t num_lab=distance->get_num_vec_rhs();
    ASSERT(m_k<=num_lab)

    int32_t* output=SG_MALLOC(int32_t, m_k*num_lab);

    //working buffer of m_train_labels
    int32_t* train_lab=SG_MALLOC(int32_t, m_k);

    //histogram of classes and returned output
    int32_t* classes=SG_MALLOC(int32_t, m_num_classes);

    SG_INFO("%d test examples\n", num_lab)
    CSignal::clear_cancel();

    switch (m_knn_solver)
    {
    case KNN_COVER_TREE: // Use cover tree
    {
        //allocation for distances to nearest neighbors
        float64_t* dists=SG_MALLOC(float64_t, m_k);

        // From the sets of features (lhs and rhs) stored in distance,
        // build arrays of cover tree points
        v_array< CJLCoverTreePoint > set_of_points =
            parse_points(distance, FC_LHS);
        v_array< CJLCoverTreePoint > set_of_queries =
            parse_points(distance, FC_RHS);

        // Build the cover trees, one for the test vectors (rhs features)
        // and another for the training vectors (lhs features)
        CFeatures* r = distance->replace_rhs( distance->get_lhs() );
        node< CJLCoverTreePoint > top = batch_create(set_of_points);
        CFeatures* l = distance->replace_lhs(r);
        distance->replace_rhs(r);
        node< CJLCoverTreePoint > top_query = batch_create(set_of_queries);

        // Get the k nearest neighbors to all the test vectors (batch method)
        distance->replace_lhs(l);
        v_array< v_array< CJLCoverTreePoint > > res;
        k_nearest_neighbor(top, top_query, res, m_k);

        for ( int32_t i = 0 ; i < res.index ; ++i )
        {
            // Handle the fact that the cover tree doesn't return
            // neighbors ordered by distance
            for ( int32_t j = 0 ; j < m_k ; ++j )
            {
                // The first index in res[i] points to the test vector
                dists[j] = distance->distance(res[i][j+1].m_index,
                        res[i][0].m_index);
                train_lab[j] = m_train_labels.vector[
                        res[i][j+1].m_index ];
            }

            // Now we get the indices to the neighbors sorted by distance
            CMath::qsort_index(dists, train_lab, m_k);

            choose_class_for_multiple_k(output+res[i][0].m_index, classes,
                    train_lab, num_lab);
        }

        SG_FREE(dists);
        break;
    }
    case KNN_KDTREE:
    {
        //allocation for distances to nearest neighbors
        float64_t* dists=SG_MALLOC(float64_t, m_k);

        CFeatures* lhs = distance->get_lhs();
        CKDTree* kd_tree = new CKDTree(m_leaf_size);
        kd_tree->build_tree(dynamic_cast<CDenseFeatures<float64_t>*>(lhs));
        SG_UNREF(lhs);

        CFeatures* data = distance->get_rhs();
        kd_tree->query_knn(dynamic_cast<CDenseFeatures<float64_t>*>(data), m_k);
        SGMatrix<index_t> NN = kd_tree->get_knn_indices();
        for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
        {
            //write the labels of the k nearest neighbors from their indices
            for (int32_t j=0; j<m_k; j++)
            {
                train_lab[j] = m_train_labels[ NN(j,i) ];
                dists[j] = distance->distance(i, NN(j,i));
            }
            CMath::qsort_index(dists, train_lab, m_k);

            choose_class_for_multiple_k(output+i, classes, train_lab, num_lab);
        }
        SG_FREE(dists);
        SG_UNREF(data);
        SG_UNREF(kd_tree);
        break;
    }
    default:
    {
        //get the k nearest neighbors of each example
        SGMatrix<index_t> NN = nearest_neighbors();

        for (int32_t i=0; i<num_lab && (!CSignal::cancel_computations()); i++)
        {
            //write the labels of the k nearest neighbors from their indices
            for (int32_t j=0; j<m_k; j++)
                train_lab[j] = m_train_labels[ NN(j,i) ];

            choose_class_for_multiple_k(output+i, classes, train_lab, num_lab);
        }
    }
    }

    SG_FREE(train_lab);
    SG_FREE(classes);

    return SGMatrix<int32_t>(output,num_lab,m_k,true);
}
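
/* Interpreting the result (illustrative): the returned matrix has num_lab
 * rows and m_k columns, and entry (i, j) is the predicted label of test
 * vector i when only the j+1 nearest neighbors vote.
 *
 *   SGMatrix<int32_t> out = knn->classify_for_multiple_k();
 *   int32_t pred_k1 = out(0, 0);                // test vector 0, k=1
 *   int32_t pred_kmax = out(0, out.num_cols-1); // test vector 0, k=m_k
 */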

void CKNN::init_distance(CFeatures* data)
{
    if (!distance)
        SG_ERROR("No distance assigned!\n")
    CFeatures* lhs=distance->get_lhs();
    if (!lhs || !lhs->get_num_vectors())
    {
        SG_UNREF(lhs);
        SG_ERROR("No vectors on left hand side\n")
    }
    distance->init(lhs, data);
    SG_UNREF(lhs);
}
551 
552 bool CKNN::load(FILE* srcfile)
553 {
556  return false;
557 }
558 
559 bool CKNN::save(FILE* dstfile)
560 {
563  return false;
564 }

void CKNN::store_model_features()
{
    CFeatures* d_lhs=distance->get_lhs();
    CFeatures* d_rhs=distance->get_rhs();

    /* copy lhs of underlying distance */
    distance->init(d_lhs->duplicate(), d_rhs);

    SG_UNREF(d_lhs);
    SG_UNREF(d_rhs);
}

int32_t CKNN::choose_class(float64_t* classes, int32_t* train_lab)
{
    memset(classes, 0, sizeof(float64_t)*m_num_classes);

    float64_t multiplier = m_q;
    for (int32_t j=0; j<m_k; j++)
    {
        classes[train_lab[j]]+= multiplier;
        //squaring the multiplier makes the weights decay as q, q^2, q^4, ...
        multiplier*= multiplier;
    }

    //choose the class that received the most weighted votes
    int32_t out_idx=0;
    float64_t out_max=0;

    for (int32_t j=0; j<m_num_classes; j++)
    {
        if (out_max< classes[j])
        {
            out_idx= j;
            out_max= classes[j];
        }
    }

    return out_idx;
}
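
/* Worked example of the weighting above (illustrative): because the
 * multiplier is squared after each neighbor, q=0.5 gives vote weights 0.5,
 * 0.25, 0.0625, ... for the 1st, 2nd, 3rd, ... nearest neighbor, while the
 * default q=1.0 keeps every one of the k votes at weight 1. */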

void CKNN::choose_class_for_multiple_k(int32_t* output, int32_t* classes, int32_t* train_lab, int32_t step)
{
    //incrementally compute the histogram of class outputs of the k nearest neighbours
    memset(classes, 0, sizeof(int32_t)*m_num_classes);

    for (int32_t j=0; j<m_k; j++)
    {
        classes[train_lab[j]]++;

        //choose the class voted most often among the first j+1 neighbours
        int32_t out_idx=0;
        int32_t out_max=0;

        for (int32_t c=0; c<m_num_classes; c++)
        {
            if (out_max< classes[c])
            {
                out_idx= c;
                out_max= classes[c];
            }
        }

        //the prediction using k=j+1 neighbours goes to column j of the output matrix
        output[j*step]=out_idx+m_min_label;
    }
}
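
/* Worked example (illustrative): with m_num_classes=3, m_min_label=0 and
 * nearest neighbors labelled train_lab = {2, 0, 2}, the loop writes
 * output[0]=2 (k=1, histogram {0,0,1}), output[step]=0 (k=2, the tie in
 * {1,0,1} is broken towards the smaller class index) and output[2*step]=2
 * (k=3, histogram {1,0,2}). */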