Coverage Report - weka.classifiers.lazy.LWL
 
Classes in this File    Line Coverage    Branch Coverage    Complexity
LWL                     0% (0/195)       0% (0/96)          3.52
 
/*
 *   This program is free software: you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation, either version 3 of the License, or
 *   (at your option) any later version.
 *
 *   This program is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *   GNU General Public License for more details.
 *
 *   You should have received a copy of the GNU General Public License
 *   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

/*
 *    LWL.java
 *    Copyright (C) 1999-2012 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.lazy;

import java.util.Enumeration;
import java.util.Vector;

import weka.classifiers.Classifier;
import weka.classifiers.SingleClassifierEnhancer;
import weka.classifiers.UpdateableClassifier;
import weka.core.Capabilities;
import weka.core.Capabilities.Capability;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.RevisionUtils;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.neighboursearch.LinearNNSearch;
import weka.core.neighboursearch.NearestNeighbourSearch;

/**
 <!-- globalinfo-start -->
 * Locally weighted learning. Uses an instance-based algorithm to assign instance weights which are then used by a specified WeightedInstancesHandler.<br/>
 * Can do classification (e.g. using naive Bayes) or regression (e.g. using linear regression).<br/>
 * <br/>
 * For more info, see<br/>
 * <br/>
 * Eibe Frank, Mark Hall, Bernhard Pfahringer: Locally Weighted Naive Bayes. In: 19th Conference in Uncertainty in Artificial Intelligence, 249-256, 2003.<br/>
 * <br/>
 * C. Atkeson, A. Moore, S. Schaal (1996). Locally weighted learning. AI Review.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;inproceedings{Frank2003,
 *    author = {Eibe Frank and Mark Hall and Bernhard Pfahringer},
 *    booktitle = {19th Conference in Uncertainty in Artificial Intelligence},
 *    pages = {249-256},
 *    publisher = {Morgan Kaufmann},
 *    title = {Locally Weighted Naive Bayes},
 *    year = {2003}
 * }
 *
 * &#64;article{Atkeson1996,
 *    author = {C. Atkeson and A. Moore and S. Schaal},
 *    journal = {AI Review},
 *    title = {Locally weighted learning},
 *    year = {1996}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -A
 *  The nearest neighbour search algorithm to use (default: weka.core.neighboursearch.LinearNNSearch).
 * </pre>
 *
 * <pre> -K &lt;number of neighbours&gt;
 *  Set the number of neighbours used to set the kernel bandwidth.
 *  (default all)</pre>
 *
 * <pre> -U &lt;number of weighting method&gt;
 *  Set the weighting kernel shape to use. 0=Linear, 1=Epanechnikov,
 *  2=Tricube, 3=Inverse, 4=Gaussian.
 *  (default 0 = Linear)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -W
 *  Full name of base classifier.
 *  (default: weka.classifiers.trees.DecisionStump)</pre>
 *
 * <pre>
 * Options specific to classifier weka.classifiers.trees.DecisionStump:
 * </pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * @author Len Trigg (trigg@cs.waikato.ac.nz)
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
 * @version $Revision: 8034 $
 */
public class LWL
  extends SingleClassifierEnhancer
  implements UpdateableClassifier, WeightedInstancesHandler,
             TechnicalInformationHandler {

  /** for serialization. */
  static final long serialVersionUID = 1979797405383665815L;

  /** The training instances used for classification. */
  protected Instances m_Train;

  /** The number of neighbours used to select the kernel bandwidth. */
  protected int m_kNN = -1;

  /** The weighting kernel method currently selected. */
  protected int m_WeightKernel = LINEAR;

  /** True if m_kNN should be set to all instances. */
  protected boolean m_UseAllK = true;

  /** The nearest neighbour search algorithm to use.
   * (Default: weka.core.neighboursearch.LinearNNSearch)
   */
  protected NearestNeighbourSearch m_NNSearch = new LinearNNSearch();

  /** The available kernel weighting methods. */
  public static final int LINEAR       = 0;
  public static final int EPANECHNIKOV = 1;
  public static final int TRICUBE      = 2;
  public static final int INVERSE      = 3;
  public static final int GAUSS        = 4;
  public static final int CONSTANT     = 5;

  /** a ZeroR model in case no model can be built from the data. */
  protected Classifier m_ZeroR;

  /**
   * Returns a string describing the classifier.
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "Locally weighted learning. Uses an instance-based algorithm to "
      + "assign instance weights which are then used by a specified "
      + "WeightedInstancesHandler.\n"
      + "Can do classification (e.g. using naive Bayes) or regression "
      + "(e.g. using linear regression).\n\n"
      + "For more info, see\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;
    TechnicalInformation additional;

    result = new TechnicalInformation(Type.INPROCEEDINGS);
    result.setValue(Field.AUTHOR, "Eibe Frank and Mark Hall and Bernhard Pfahringer");
    result.setValue(Field.YEAR, "2003");
    result.setValue(Field.TITLE, "Locally Weighted Naive Bayes");
    result.setValue(Field.BOOKTITLE, "19th Conference in Uncertainty in Artificial Intelligence");
    result.setValue(Field.PAGES, "249-256");
    result.setValue(Field.PUBLISHER, "Morgan Kaufmann");

    additional = result.add(Type.ARTICLE);
    additional.setValue(Field.AUTHOR, "C. Atkeson and A. Moore and S. Schaal");
    additional.setValue(Field.YEAR, "1996");
    additional.setValue(Field.TITLE, "Locally weighted learning");
    additional.setValue(Field.JOURNAL, "AI Review");

    return result;
  }

  /**
   * Constructor.
   */
  public LWL() {
    m_Classifier = new weka.classifiers.trees.DecisionStump();
  }

  /**
   * String describing default classifier.
   *
   * @return the default classifier classname
   */
  protected String defaultClassifierString() {
    return "weka.classifiers.trees.DecisionStump";
  }
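
  // --------------------------------------------------------------------
  // Usage sketch (illustrative, not part of the original source): one way
  // a caller might configure LWL programmatically. The method and option
  // names come from this class; `trainingData` and `testInstance` are
  // assumed to be loaded elsewhere, and any WeightedInstancesHandler
  // (e.g. weka.classifiers.bayes.NaiveBayes) can serve as base classifier.
  //
  //   LWL lwl = new LWL();                        // DecisionStump by default
  //   lwl.setClassifier(new weka.classifiers.bayes.NaiveBayes());
  //   lwl.setKNN(50);                             // bandwidth from the 50th neighbour
  //   lwl.setWeightingKernel(LWL.GAUSS);          // Gaussian weighting kernel
  //   lwl.buildClassifier(trainingData);          // lazy: just stores the data
  //   double[] dist = lwl.distributionForInstance(testInstance);
  // --------------------------------------------------------------------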
 
  /**
   * Returns an enumeration of the additional measure names
   * produced by the neighbour search algorithm.
   * @return an enumeration of the measure names
   */
  public Enumeration enumerateMeasures() {
    return m_NNSearch.enumerateMeasures();
  }

  /**
   * Returns the value of the named measure from the
   * neighbour search algorithm.
   * @param additionalMeasureName the name of the measure to query for its value
   * @return the value of the named measure
   * @throws IllegalArgumentException if the named measure is not supported
   */
  public double getMeasure(String additionalMeasureName) {
    return m_NNSearch.getMeasure(additionalMeasureName);
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {
    Vector newVector = new Vector(3);
    newVector.addElement(new Option("\tThe nearest neighbour search " +
                                    "algorithm to use " +
                                    "(default: weka.core.neighboursearch.LinearNNSearch).\n",
                                    "A", 0, "-A"));
    newVector.addElement(new Option("\tSet the number of neighbours used to set"
                                    + " the kernel bandwidth.\n"
                                    + "\t(default all)",
                                    "K", 1, "-K <number of neighbours>"));
    newVector.addElement(new Option("\tSet the weighting kernel shape to use."
                                    + " 0=Linear, 1=Epanechnikov,\n"
                                    + "\t2=Tricube, 3=Inverse, 4=Gaussian.\n"
                                    + "\t(default 0 = Linear)",
                                    "U", 1, "-U <number of weighting method>"));

    Enumeration enu = super.listOptions();
    while (enu.hasMoreElements()) {
      newVector.addElement(enu.nextElement());
    }

    return newVector.elements();
  }
 
  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -A
   *  The nearest neighbour search algorithm to use (default: weka.core.neighboursearch.LinearNNSearch).
   * </pre>
   *
   * <pre> -K &lt;number of neighbours&gt;
   *  Set the number of neighbours used to set the kernel bandwidth.
   *  (default all)</pre>
   *
   * <pre> -U &lt;number of weighting method&gt;
   *  Set the weighting kernel shape to use. 0=Linear, 1=Epanechnikov,
   *  2=Tricube, 3=Inverse, 4=Gaussian.
   *  (default 0 = Linear)</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -W
   *  Full name of base classifier.
   *  (default: weka.classifiers.trees.DecisionStump)</pre>
   *
   * <pre>
   * Options specific to classifier weka.classifiers.trees.DecisionStump:
   * </pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    String knnString = Utils.getOption('K', options);
    if (knnString.length() != 0) {
      setKNN(Integer.parseInt(knnString));
    } else {
      setKNN(-1);
    }

    String weightString = Utils.getOption('U', options);
    if (weightString.length() != 0) {
      setWeightingKernel(Integer.parseInt(weightString));
    } else {
      setWeightingKernel(LINEAR);
    }

    String nnSearchClass = Utils.getOption('A', options);
    if (nnSearchClass.length() != 0) {
      String nnSearchClassSpec[] = Utils.splitOptions(nnSearchClass);
      if (nnSearchClassSpec.length == 0) {
        throw new Exception("Invalid NearestNeighbourSearch algorithm "
                            + "specification string.");
      }
      String className = nnSearchClassSpec[0];
      nnSearchClassSpec[0] = "";

      setNearestNeighbourSearchAlgorithm((NearestNeighbourSearch)
                  Utils.forName(NearestNeighbourSearch.class,
                                className,
                                nnSearchClassSpec));
    } else {
      this.setNearestNeighbourSearchAlgorithm(new LinearNNSearch());
    }

    super.setOptions(options);
  }
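
  // Illustrative command line (assembled from the options documented above;
  // the dataset path and NaiveBayes base classifier are placeholders, and -t
  // is the standard WEKA training-file flag handled by runClassifier):
  //
  //   java weka.classifiers.lazy.LWL -t data.arff \
  //       -K 25 -U 4 -A weka.core.neighboursearch.LinearNNSearch \
  //       -W weka.classifiers.bayes.NaiveBayes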
 
  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {
    String[] superOptions = super.getOptions();
    String[] options = new String[superOptions.length + 6];

    int current = 0;

    options[current++] = "-U"; options[current++] = "" + getWeightingKernel();
    if ((getKNN() == 0) && m_UseAllK) {
      options[current++] = "-K"; options[current++] = "-1";
    } else {
      options[current++] = "-K"; options[current++] = "" + getKNN();
    }
    options[current++] = "-A";
    options[current++] = m_NNSearch.getClass().getName() + " " + Utils.joinOptions(m_NNSearch.getOptions());

    System.arraycopy(superOptions, 0, options, current,
                     superOptions.length);

    return options;
  }
   
  /**
   * Returns the tip text for this property.
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String KNNTipText() {
    return "How many neighbours are used to determine the width of the "
      + "weighting function (<= 0 means all neighbours).";
  }

  /**
   * Sets the number of neighbours used for kernel bandwidth setting.
   * The bandwidth is taken as the distance to the kth neighbour.
   *
   * @param knn the number of neighbours included inside the kernel
   * bandwidth, or 0 to specify using all neighbours.
   */
  public void setKNN(int knn) {
    m_kNN = knn;
    if (knn <= 0) {
      m_kNN = 0;
      m_UseAllK = true;
    } else {
      m_UseAllK = false;
    }
  }

  /**
   * Gets the number of neighbours used for kernel bandwidth setting.
   * The bandwidth is taken as the distance to the kth neighbour.
   *
   * @return the number of neighbours included inside the kernel
   * bandwidth, or 0 for all neighbours
   */
  public int getKNN() {
    return m_kNN;
  }

  /**
   * Returns the tip text for this property.
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String weightingKernelTipText() {
    return "Determines the weighting function [0 = Linear, 1 = Epanechnikov, "
         + "2 = Tricube, 3 = Inverse, 4 = Gaussian and 5 = Constant "
         + "(default 0 = Linear)].";
  }

  /**
   * Sets the kernel weighting method to use. Must be one of LINEAR,
   * EPANECHNIKOV, TRICUBE, INVERSE, GAUSS or CONSTANT; other values
   * are ignored.
   *
   * @param kernel the new kernel method to use. Must be one of LINEAR,
   * EPANECHNIKOV, TRICUBE, INVERSE, GAUSS or CONSTANT.
   */
  public void setWeightingKernel(int kernel) {
    if ((kernel != LINEAR)
        && (kernel != EPANECHNIKOV)
        && (kernel != TRICUBE)
        && (kernel != INVERSE)
        && (kernel != GAUSS)
        && (kernel != CONSTANT)) {
      return;
    }
    m_WeightKernel = kernel;
  }
 
  /**
   * Gets the kernel weighting method in use.
   *
   * @return the kernel method in use. Will be one of LINEAR,
   * EPANECHNIKOV, TRICUBE, INVERSE, GAUSS or CONSTANT.
   */
  public int getWeightingKernel() {
    return m_WeightKernel;
  }

  /**
   * Returns the tip text for this property.
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String nearestNeighbourSearchAlgorithmTipText() {
    return "The nearest neighbour search algorithm to use (Default: LinearNNSearch).";
  }

  /**
   * Returns the current nearestNeighbourSearch algorithm in use.
   * @return the NearestNeighbourSearch algorithm currently in use.
   */
  public NearestNeighbourSearch getNearestNeighbourSearchAlgorithm() {
    return m_NNSearch;
  }

  /**
   * Sets the nearestNeighbourSearch algorithm to be used for finding nearest
   * neighbour(s).
   * @param nearestNeighbourSearchAlgorithm - The NearestNeighbourSearch class.
   */
  public void setNearestNeighbourSearchAlgorithm(NearestNeighbourSearch nearestNeighbourSearchAlgorithm) {
    m_NNSearch = nearestNeighbourSearchAlgorithm;
  }
 
  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result;

    if (m_Classifier != null) {
      result = m_Classifier.getCapabilities();
    } else {
      result = super.getCapabilities();
    }

    result.setMinimumNumberInstances(0);

    // set dependencies
    for (Capability cap : Capability.values())
      result.enableDependency(cap);

    return result;
  }
   
  /**
   * Generates the classifier.
   *
   * @param instances set of instances serving as training data
   * @throws Exception if the classifier has not been generated successfully
   */
  public void buildClassifier(Instances instances) throws Exception {
    if (!(m_Classifier instanceof WeightedInstancesHandler)) {
      throw new IllegalArgumentException("Classifier must be a "
                                         + "WeightedInstancesHandler!");
    }

    // can classifier handle the data?
    getCapabilities().testWithFail(instances);

    // remove instances with missing class
    instances = new Instances(instances);
    instances.deleteWithMissingClass();

    // only class? -> build ZeroR model
    if (instances.numAttributes() == 1) {
      System.err.println(
          "Cannot build model (only class attribute present in data!), "
          + "using ZeroR model instead!");
      m_ZeroR = new weka.classifiers.rules.ZeroR();
      m_ZeroR.buildClassifier(instances);
      return;
    } else {
      m_ZeroR = null;
    }

    m_Train = new Instances(instances, 0, instances.numInstances());

    m_NNSearch.setInstances(m_Train);
  }
 
  /**
   * Adds the supplied instance to the training set.
   *
   * @param instance the instance to add
   * @throws Exception if instance could not be incorporated
   * successfully
   */
  public void updateClassifier(Instance instance) throws Exception {
    if (m_Train == null) {
      throw new Exception("No training instance structure set!");
    } else if (m_Train.equalHeaders(instance.dataset()) == false) {
      throw new Exception("Incompatible instance types\n" + m_Train.equalHeadersMsg(instance.dataset()));
    }
    if (!instance.classIsMissing()) {
      m_NNSearch.update(instance);
      m_Train.add(instance);
    }
  }
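
  // Incremental-use sketch (illustrative, not part of the original source):
  // because LWL is lazy, buildClassifier only stores the data, so later
  // arrivals can be folded in through updateClassifier without retraining.
  // `initialData` and `laterArrivals` are assumed to be Instances objects
  // with identical headers:
  //
  //   LWL lwl = new LWL();
  //   lwl.buildClassifier(initialData);
  //   for (int i = 0; i < laterArrivals.numInstances(); i++)
  //     lwl.updateClassifier(laterArrivals.instance(i));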
   
  /**
   * Calculates the class membership probabilities for the given test instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @throws Exception if distribution can't be computed successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {
    // default model?
    if (m_ZeroR != null) {
      return m_ZeroR.distributionForInstance(instance);
    }

    if (m_Train.numInstances() == 0) {
      throw new Exception("No training instances!");
    }

    m_NNSearch.addInstanceInfo(instance);

    int k = m_Train.numInstances();
    if ((!m_UseAllK && (m_kNN < k)) /*&&
       !(m_WeightKernel==INVERSE ||
         m_WeightKernel==GAUSS)*/ ) {
      k = m_kNN;
    }

    Instances neighbours = m_NNSearch.kNearestNeighbours(instance, k);
    double distances[] = m_NNSearch.getDistances();

    if (m_Debug) {
      System.out.println("Test Instance: " + instance);
      System.out.println("For " + k + " kept " + neighbours.numInstances() + " out of " +
                         m_Train.numInstances() + " instances.");
    }

    // if the search has skipped so many instances that fewer than k
    // neighbours remain, shrink k accordingly
    if (k > distances.length)
      k = distances.length;

    if (m_Debug) {
      System.out.println("Instance Distances");
      for (int i = 0; i < distances.length; i++) {
        System.out.println("" + distances[i]);
      }
    }

    // Determine the bandwidth
    double bandwidth = distances[k - 1];

    // Check for bandwidth zero
    if (bandwidth <= 0) {
      // if the kth distance is zero then give all instances the same weight
      for (int i = 0; i < distances.length; i++)
        distances[i] = 1;
    } else {
      // Rescale the distances by the bandwidth
      for (int i = 0; i < distances.length; i++)
        distances[i] = distances[i] / bandwidth;
    }

    // Pass the distances through a weighting kernel
    for (int i = 0; i < distances.length; i++) {
      switch (m_WeightKernel) {
        case LINEAR:
          distances[i] = 1.0001 - distances[i];
          break;
        case EPANECHNIKOV:
          distances[i] = 3/4D * (1.0001 - distances[i] * distances[i]);
          break;
        case TRICUBE:
          distances[i] = Math.pow((1.0001 - Math.pow(distances[i], 3)), 3);
          break;
        case CONSTANT:
          //System.err.println("using constant kernel");
          distances[i] = 1;
          break;
        case INVERSE:
          distances[i] = 1.0 / (1.0 + distances[i]);
          break;
        case GAUSS:
          distances[i] = Math.exp(-distances[i] * distances[i]);
          break;
      }
    }

    if (m_Debug) {
      System.out.println("Instance Weights");
      for (int i = 0; i < distances.length; i++) {
        System.out.println("" + distances[i]);
      }
    }

    // Set the weights on the training data
    double sumOfWeights = 0, newSumOfWeights = 0;
    for (int i = 0; i < distances.length; i++) {
      double weight = distances[i];
      Instance inst = (Instance) neighbours.instance(i);
      sumOfWeights += inst.weight();
      newSumOfWeights += inst.weight() * weight;
      inst.setWeight(inst.weight() * weight);
      //weightedTrain.add(newInst);
    }

    // Rescale weights
    for (int i = 0; i < neighbours.numInstances(); i++) {
      Instance inst = neighbours.instance(i);
      inst.setWeight(inst.weight() * sumOfWeights / newSumOfWeights);
    }

    // Create a weighted classifier
    m_Classifier.buildClassifier(neighbours);

    if (m_Debug) {
      System.out.println("Classifying test instance: " + instance);
      System.out.println("Built base classifier:\n"
                         + m_Classifier.toString());
    }

    // Return the classifier's predictions
    return m_Classifier.distributionForInstance(instance);
  }
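
  // Worked example (illustrative numbers, not from the original source):
  // with k = 3 and neighbour distances {0.2, 0.5, 1.0}, the bandwidth is
  // the kth distance, 1.0, so the rescaled distances are unchanged. The
  // linear kernel then yields weights 1.0001 - d = {0.8001, 0.5001, 0.0001},
  // which are finally rescaled so that the total instance weight of the
  // neighbourhood is preserved before the base classifier is built on it.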
  
  /**
   * Returns a description of this classifier.
   *
   * @return a description of this classifier as a string.
   */
  public String toString() {
    // only ZeroR model?
    if (m_ZeroR != null) {
      StringBuffer buf = new StringBuffer();
      buf.append(this.getClass().getName().replaceAll(".*\\.", "") + "\n");
      buf.append(this.getClass().getName().replaceAll(".*\\.", "").replaceAll(".", "=") + "\n\n");
      buf.append("Warning: No model could be built, hence ZeroR model is used:\n\n");
      buf.append(m_ZeroR.toString());
      return buf.toString();
    }

    if (m_Train == null) {
      return "Locally weighted learning: No model built yet.";
    }
    String result = "Locally weighted learning\n"
      + "===========================\n";

    result += "Using classifier: " + m_Classifier.getClass().getName() + "\n";

    switch (m_WeightKernel) {
    case LINEAR:
      result += "Using linear weighting kernels\n";
      break;
    case EPANECHNIKOV:
      result += "Using epanechnikov weighting kernels\n";
      break;
    case TRICUBE:
      result += "Using tricube weighting kernels\n";
      break;
    case INVERSE:
      result += "Using inverse-distance weighting kernels\n";
      break;
    case GAUSS:
      result += "Using gaussian weighting kernels\n";
      break;
    case CONSTANT:
      result += "Using constant weighting kernels\n";
      break;
    }
    result += "Using " + (m_UseAllK ? "all" : "" + m_kNN) + " neighbours";
    return result;
  }
   
  /**
   * Returns the revision string.
   *
   * @return                the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 8034 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv the options
   */
  public static void main(String[] argv) {
    runClassifier(new LWL(), argv);
  }
}