Coverage Report - weka.classifiers.meta.AttributeSelectedClassifier
 
Classes in this File Line Coverage Branch Coverage Complexity
AttributeSelectedClassifier
0%
0/157
0%
0/66
2.852
 
 1  
 /*
 2  
  *   This program is free software: you can redistribute it and/or modify
 3  
  *   it under the terms of the GNU General Public License as published by
 4  
  *   the Free Software Foundation, either version 3 of the License, or
 5  
  *   (at your option) any later version.
 6  
  *
 7  
  *   This program is distributed in the hope that it will be useful,
 8  
  *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 9  
  *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 10  
  *   GNU General Public License for more details.
 11  
  *
 12  
  *   You should have received a copy of the GNU General Public License
 13  
  *   along with this program.  If not, see <http://www.gnu.org/licenses/>.
 14  
  */
 15  
 
 16  
 /*
 17  
  *    AttributeSelectedClassifier.java
 18  
  *    Copyright (C) 2000-2012 University of Waikato, Hamilton, New Zealand
 19  
  *
 20  
  */
 21  
 
 22  
 package weka.classifiers.meta;
 23  
 
 24  
 import java.util.Enumeration;
 25  
 import java.util.Random;
 26  
 import java.util.Vector;
 27  
 
 28  
 import weka.attributeSelection.ASEvaluation;
 29  
 import weka.attributeSelection.ASSearch;
 30  
 import weka.attributeSelection.AttributeSelection;
 31  
 import weka.classifiers.SingleClassifierEnhancer;
 32  
 import weka.core.AdditionalMeasureProducer;
 33  
 import weka.core.Capabilities;
 34  
 import weka.core.Capabilities.Capability;
 35  
 import weka.core.Drawable;
 36  
 import weka.core.Instance;
 37  
 import weka.core.Instances;
 38  
 import weka.core.Option;
 39  
 import weka.core.OptionHandler;
 40  
 import weka.core.RevisionUtils;
 41  
 import weka.core.Utils;
 42  
 import weka.core.WeightedInstancesHandler;
 43  
 
 44  
 /**
 45  
  <!-- globalinfo-start -->
 46  
  * Dimensionality of training and test data is reduced by attribute selection before being passed on to a classifier.
 47  
  * <p/>
 48  
  <!-- globalinfo-end -->
 49  
  *
 50  
  <!-- options-start -->
 51  
  * Valid options are: <p/>
 52  
  * 
 53  
  * <pre> -E &lt;attribute evaluator specification&gt;
 54  
  *  Full class name of attribute evaluator, followed
 55  
  *  by its options.
 56  
  *  eg: "weka.attributeSelection.CfsSubsetEval -L"
 57  
  *  (default weka.attributeSelection.CfsSubsetEval)</pre>
 58  
  * 
 59  
  * <pre> -S &lt;search method specification&gt;
 60  
  *  Full class name of search method, followed
 61  
  *  by its options.
 62  
  *  eg: "weka.attributeSelection.BestFirst -D 1"
 63  
  *  (default weka.attributeSelection.BestFirst)</pre>
 64  
  * 
 65  
  * <pre> -D
 66  
  *  If set, classifier is run in debug mode and
 67  
  *  may output additional info to the console</pre>
 68  
  * 
 69  
  * <pre> -W
 70  
  *  Full name of base classifier.
 71  
  *  (default: weka.classifiers.trees.J48)</pre>
 72  
  * 
 73  
  * <pre> 
 74  
  * Options specific to classifier weka.classifiers.trees.J48:
 75  
  * </pre>
 76  
  * 
 77  
  * <pre> -U
 78  
  *  Use unpruned tree.</pre>
 79  
  * 
 80  
  * <pre> -C &lt;pruning confidence&gt;
 81  
  *  Set confidence threshold for pruning.
 82  
  *  (default 0.25)</pre>
 83  
  * 
 84  
  * <pre> -M &lt;minimum number of instances&gt;
 85  
  *  Set minimum number of instances per leaf.
 86  
  *  (default 2)</pre>
 87  
  * 
 88  
  * <pre> -R
 89  
  *  Use reduced error pruning.</pre>
 90  
  * 
 91  
  * <pre> -N &lt;number of folds&gt;
 92  
  *  Set number of folds for reduced error
 93  
  *  pruning. One fold is used as pruning set.
 94  
  *  (default 3)</pre>
 95  
  * 
 96  
  * <pre> -B
 97  
  *  Use binary splits only.</pre>
 98  
  * 
 99  
  * <pre> -S
 100  
  *  Don't perform subtree raising.</pre>
 101  
  * 
 102  
  * <pre> -L
 103  
  *  Do not clean up after the tree has been built.</pre>
 104  
  * 
 105  
  * <pre> -A
 106  
  *  Laplace smoothing for predicted probabilities.</pre>
 107  
  * 
 108  
  * <pre> -Q &lt;seed&gt;
 109  
  *  Seed for random data shuffling (default 1).</pre>
 110  
  * 
 111  
  <!-- options-end -->
 112  
  *
 113  
  * @author Mark Hall (mhall@cs.waikato.ac.nz)
 114  
  * @version $Revision: 8034 $
 115  
  */
 116  
 public class AttributeSelectedClassifier 
 117  
   extends SingleClassifierEnhancer
 118  
   implements OptionHandler, Drawable, AdditionalMeasureProducer,
 119  
              WeightedInstancesHandler {
 120  
 
 121  
   /** for serialization */
 122  
   static final long serialVersionUID = -5951805453487947577L;
 123  
   
 124  
   /** The attribute selection object */
 125  0
   protected AttributeSelection m_AttributeSelection = null;
 126  
 
 127  
   /** The attribute evaluator to use */
 128  0
   protected ASEvaluation m_Evaluator = 
 129  
     new weka.attributeSelection.CfsSubsetEval();
 130  
 
 131  
   /** The search method to use */
 132  0
   protected ASSearch m_Search = new weka.attributeSelection.BestFirst();
 133  
 
 134  
   /** The header of the dimensionally reduced data */
 135  
   protected Instances m_ReducedHeader;
 136  
 
 137  
   /** The number of class vals in the training data (1 if class is numeric) */
 138  
   protected int m_numClasses;
 139  
 
 140  
   /** The number of attributes selected by the attribute selection phase */
 141  
   protected double m_numAttributesSelected;
 142  
 
 143  
   /** The time taken to select attributes in milliseconds */
 144  
   protected double m_selectionTime;
 145  
 
 146  
   /** The time taken to select attributes AND build the classifier */
 147  
   protected double m_totalTime;
 148  
 
 149  
   
 150  
   /**
 151  
    * String describing default classifier.
 152  
    * 
 153  
    * @return the default classifier classname
 154  
    */
 155  
   protected String defaultClassifierString() {
 156  
     
 157  0
     return "weka.classifiers.trees.J48";
 158  
   }
 159  
   
 160  
   /**
 161  
    * Default constructor.
 162  
    */
 163  0
   public AttributeSelectedClassifier() {
 164  0
     m_Classifier = new weka.classifiers.trees.J48();
 165  0
   }
 166  
 
 167  
   /**
 168  
    * Returns a string describing this search method
 169  
    * @return a description of the search method suitable for
 170  
    * displaying in the explorer/experimenter gui
 171  
    */
 172  
   public String globalInfo() {
 173  0
     return "Dimensionality of training and test data is reduced by "
 174  
       +"attribute selection before being passed on to a classifier.";
 175  
   }
 176  
 
 177  
   /**
 178  
    * Returns an enumeration describing the available options.
 179  
    *
 180  
    * @return an enumeration of all the available options.
 181  
    */
 182  
   public Enumeration listOptions() {
 183  0
      Vector newVector = new Vector(3);
 184  
     
 185  0
     newVector.addElement(new Option(
 186  
               "\tFull class name of attribute evaluator, followed\n"
 187  
               + "\tby its options.\n"
 188  
               + "\teg: \"weka.attributeSelection.CfsSubsetEval -L\"\n"
 189  
               + "\t(default weka.attributeSelection.CfsSubsetEval)",
 190  
               "E", 1, "-E <attribute evaluator specification>"));
 191  
 
 192  0
     newVector.addElement(new Option(
 193  
               "\tFull class name of search method, followed\n"
 194  
               + "\tby its options.\n"
 195  
               + "\teg: \"weka.attributeSelection.BestFirst -D 1\"\n"
 196  
               + "\t(default weka.attributeSelection.BestFirst)",
 197  
               "S", 1, "-S <search method specification>"));
 198  
     
 199  0
     Enumeration enu = super.listOptions();
 200  0
     while (enu.hasMoreElements()) {
 201  0
       newVector.addElement(enu.nextElement());
 202  
     }
 203  0
     return newVector.elements();
 204  
   }
 205  
 
 206  
   /**
 207  
    * Parses a given list of options. <p/>
 208  
    *
 209  
    <!-- options-start -->
 210  
    * Valid options are: <p/>
 211  
    * 
 212  
    * <pre> -E &lt;attribute evaluator specification&gt;
 213  
    *  Full class name of attribute evaluator, followed
 214  
    *  by its options.
 215  
    *  eg: "weka.attributeSelection.CfsSubsetEval -L"
 216  
    *  (default weka.attributeSelection.CfsSubsetEval)</pre>
 217  
    * 
 218  
    * <pre> -S &lt;search method specification&gt;
 219  
    *  Full class name of search method, followed
 220  
    *  by its options.
 221  
    *  eg: "weka.attributeSelection.BestFirst -D 1"
 222  
    *  (default weka.attributeSelection.BestFirst)</pre>
 223  
    * 
 224  
    * <pre> -D
 225  
    *  If set, classifier is run in debug mode and
 226  
    *  may output additional info to the console</pre>
 227  
    * 
 228  
    * <pre> -W
 229  
    *  Full name of base classifier.
 230  
    *  (default: weka.classifiers.trees.J48)</pre>
 231  
    * 
 232  
    * <pre> 
 233  
    * Options specific to classifier weka.classifiers.trees.J48:
 234  
    * </pre>
 235  
    * 
 236  
    * <pre> -U
 237  
    *  Use unpruned tree.</pre>
 238  
    * 
 239  
    * <pre> -C &lt;pruning confidence&gt;
 240  
    *  Set confidence threshold for pruning.
 241  
    *  (default 0.25)</pre>
 242  
    * 
 243  
    * <pre> -M &lt;minimum number of instances&gt;
 244  
    *  Set minimum number of instances per leaf.
 245  
    *  (default 2)</pre>
 246  
    * 
 247  
    * <pre> -R
 248  
    *  Use reduced error pruning.</pre>
 249  
    * 
 250  
    * <pre> -N &lt;number of folds&gt;
 251  
    *  Set number of folds for reduced error
 252  
    *  pruning. One fold is used as pruning set.
 253  
    *  (default 3)</pre>
 254  
    * 
 255  
    * <pre> -B
 256  
    *  Use binary splits only.</pre>
 257  
    * 
 258  
    * <pre> -S
 259  
    *  Don't perform subtree raising.</pre>
 260  
    * 
 261  
    * <pre> -L
 262  
    *  Do not clean up after the tree has been built.</pre>
 263  
    * 
 264  
    * <pre> -A
 265  
    *  Laplace smoothing for predicted probabilities.</pre>
 266  
    * 
 267  
    * <pre> -Q &lt;seed&gt;
 268  
    *  Seed for random data shuffling (default 1).</pre>
 269  
    * 
 270  
    <!-- options-end -->
 271  
    *
 272  
    * @param options the list of options as an array of strings
 273  
    * @throws Exception if an option is not supported
 274  
    */
 275  
   public void setOptions(String[] options) throws Exception {
 276  
 
 277  
     // same for attribute evaluator
 278  0
     String evaluatorString = Utils.getOption('E', options);
 279  0
     if (evaluatorString.length() == 0)
 280  0
       evaluatorString = weka.attributeSelection.CfsSubsetEval.class.getName();
 281  0
     String [] evaluatorSpec = Utils.splitOptions(evaluatorString);
 282  0
     if (evaluatorSpec.length == 0) {
 283  0
       throw new Exception("Invalid attribute evaluator specification string");
 284  
     }
 285  0
     String evaluatorName = evaluatorSpec[0];
 286  0
     evaluatorSpec[0] = "";
 287  0
     setEvaluator(ASEvaluation.forName(evaluatorName, evaluatorSpec));
 288  
 
 289  
     // same for search method
 290  0
     String searchString = Utils.getOption('S', options);
 291  0
     if (searchString.length() == 0)
 292  0
       searchString = weka.attributeSelection.BestFirst.class.getName();
 293  0
     String [] searchSpec = Utils.splitOptions(searchString);
 294  0
     if (searchSpec.length == 0) {
 295  0
       throw new Exception("Invalid search specification string");
 296  
     }
 297  0
     String searchName = searchSpec[0];
 298  0
     searchSpec[0] = "";
 299  0
     setSearch(ASSearch.forName(searchName, searchSpec));
 300  
 
 301  0
     super.setOptions(options);
 302  0
   }
 303  
 
 304  
   /**
 305  
    * Gets the current settings of the Classifier.
 306  
    *
 307  
    * @return an array of strings suitable for passing to setOptions
 308  
    */
 309  
   public String [] getOptions() {
 310  
 
 311  0
     String [] superOptions = super.getOptions();
 312  0
     String [] options = new String [superOptions.length + 4];
 313  
 
 314  0
     int current = 0;
 315  
 
 316  
     // same attribute evaluator
 317  0
     options[current++] = "-E";
 318  0
     options[current++] = "" +getEvaluatorSpec();
 319  
     
 320  
     // same for search
 321  0
     options[current++] = "-S";
 322  0
     options[current++] = "" + getSearchSpec();
 323  
 
 324  0
     System.arraycopy(superOptions, 0, options, current, 
 325  
                      superOptions.length);
 326  
     
 327  0
     return options;
 328  
   }
 329  
 
 330  
   /**
 331  
    * Returns the tip text for this property
 332  
    * @return tip text for this property suitable for
 333  
    * displaying in the explorer/experimenter gui
 334  
    */
 335  
   public String evaluatorTipText() {
 336  0
     return "Set the attribute evaluator to use. This evaluator is used "
 337  
       +"during the attribute selection phase before the classifier is "
 338  
       +"invoked.";
 339  
   }
 340  
 
 341  
   /**
 342  
    * Sets the attribute evaluator
 343  
    *
 344  
    * @param evaluator the evaluator with all options set.
 345  
    */
 346  
   public void setEvaluator(ASEvaluation evaluator) {
 347  0
     m_Evaluator = evaluator;
 348  0
   }
 349  
 
 350  
   /**
 351  
    * Gets the attribute evaluator used
 352  
    *
 353  
    * @return the attribute evaluator
 354  
    */
 355  
   public ASEvaluation getEvaluator() {
 356  0
     return m_Evaluator;
 357  
   }
 358  
 
 359  
   /**
 360  
    * Gets the evaluator specification string, which contains the class name of
 361  
    * the attribute evaluator and any options to it
 362  
    *
 363  
    * @return the evaluator string.
 364  
    */
 365  
   protected String getEvaluatorSpec() {
 366  
     
 367  0
     ASEvaluation e = getEvaluator();
 368  0
     if (e instanceof OptionHandler) {
 369  0
       return e.getClass().getName() + " "
 370  
         + Utils.joinOptions(((OptionHandler)e).getOptions());
 371  
     }
 372  0
     return e.getClass().getName();
 373  
   }
 374  
 
 375  
   /**
 376  
    * Returns the tip text for this property
 377  
    * @return tip text for this property suitable for
 378  
    * displaying in the explorer/experimenter gui
 379  
    */
 380  
   public String searchTipText() {
 381  0
     return "Set the search method. This search method is used "
 382  
       +"during the attribute selection phase before the classifier is "
 383  
       +"invoked.";
 384  
   }
 385  
   
 386  
   /**
 387  
    * Sets the search method
 388  
    *
 389  
    * @param search the search method with all options set.
 390  
    */
 391  
   public void setSearch(ASSearch search) {
 392  0
     m_Search = search;
 393  0
   }
 394  
 
 395  
   /**
 396  
    * Gets the search method used
 397  
    *
 398  
    * @return the search method
 399  
    */
 400  
   public ASSearch getSearch() {
 401  0
     return m_Search;
 402  
   }
 403  
 
 404  
   /**
 405  
    * Gets the search specification string, which contains the class name of
 406  
    * the search method and any options to it
 407  
    *
 408  
    * @return the search string.
 409  
    */
 410  
   protected String getSearchSpec() {
 411  
     
 412  0
     ASSearch s = getSearch();
 413  0
     if (s instanceof OptionHandler) {
 414  0
       return s.getClass().getName() + " "
 415  
         + Utils.joinOptions(((OptionHandler)s).getOptions());
 416  
     }
 417  0
     return s.getClass().getName();
 418  
   }
 419  
 
 420  
   /**
 421  
    * Returns default capabilities of the classifier.
 422  
    *
 423  
    * @return      the capabilities of this classifier
 424  
    */
 425  
   public Capabilities getCapabilities() {
 426  
     Capabilities        result;
 427  
     
 428  0
     if (getEvaluator() == null)
 429  0
       result = super.getCapabilities();
 430  
     else
 431  0
       result = getEvaluator().getCapabilities();
 432  
     
 433  
     // set dependencies
 434  0
     for (Capability cap: Capability.values())
 435  0
       result.enableDependency(cap);
 436  
     
 437  0
     return result;
 438  
   }
 439  
 
 440  
   /**
 441  
    * Build the classifier on the dimensionally reduced data.
 442  
    *
 443  
    * @param data the training data
 444  
    * @throws Exception if the classifier could not be built successfully
 445  
    */
 446  
   public void buildClassifier(Instances data) throws Exception {
 447  0
     if (m_Classifier == null) {
 448  0
       throw new Exception("No base classifier has been set!");
 449  
     }
 450  
 
 451  0
     if (m_Evaluator == null) {
 452  0
       throw new Exception("No attribute evaluator has been set!");
 453  
     }
 454  
 
 455  0
     if (m_Search == null) {
 456  0
       throw new Exception("No search method has been set!");
 457  
     }
 458  
    
 459  
     // can classifier handle the data?
 460  0
     getCapabilities().testWithFail(data);
 461  
 
 462  
     // remove instances with missing class
 463  0
     Instances newData = new Instances(data);
 464  0
     newData.deleteWithMissingClass();
 465  
     
 466  0
     if (newData.numInstances() == 0) {
 467  0
       m_Classifier.buildClassifier(newData);
 468  0
       return;
 469  
     }
 470  0
     if (newData.classAttribute().isNominal()) {
 471  0
       m_numClasses = newData.classAttribute().numValues();
 472  
     } else {
 473  0
       m_numClasses = 1;
 474  
     }
 475  
 
 476  0
     Instances resampledData = null;
 477  
     // check to see if training data has all equal weights
 478  0
     double weight = newData.instance(0).weight();
 479  0
     boolean ok = false;
 480  0
     for (int i = 1; i < newData.numInstances(); i++) {
 481  0
       if (newData.instance(i).weight() != weight) {
 482  0
         ok = true;
 483  0
         break;
 484  
       }
 485  
     }
 486  
     
 487  0
     if (ok) {
 488  0
       if (!(m_Evaluator instanceof WeightedInstancesHandler) || 
 489  
           !(m_Classifier instanceof WeightedInstancesHandler)) {
 490  0
         Random r = new Random(1);
 491  0
         for (int i = 0; i < 10; i++) {
 492  0
           r.nextDouble();
 493  
         }
 494  0
         resampledData = newData.resampleWithWeights(r);
 495  0
       }
 496  
     } else {
 497  
       // all equal weights in the training data so just use as is
 498  0
       resampledData = newData;
 499  
     }
 500  
 
 501  0
     m_AttributeSelection = new AttributeSelection();
 502  0
     m_AttributeSelection.setEvaluator(m_Evaluator);
 503  0
     m_AttributeSelection.setSearch(m_Search);
 504  0
     long start = System.currentTimeMillis();
 505  0
     m_AttributeSelection.
 506  
       SelectAttributes((m_Evaluator instanceof WeightedInstancesHandler) 
 507  
                        ? newData
 508  
                        : resampledData);
 509  0
     long end = System.currentTimeMillis();
 510  0
     if (m_Classifier instanceof WeightedInstancesHandler) {
 511  0
       newData = m_AttributeSelection.reduceDimensionality(newData);
 512  0
       m_Classifier.buildClassifier(newData);
 513  
     } else {
 514  0
       resampledData = m_AttributeSelection.reduceDimensionality(resampledData);
 515  0
       m_Classifier.buildClassifier(resampledData);
 516  
     }
 517  
 
 518  0
     long end2 = System.currentTimeMillis();
 519  0
     m_numAttributesSelected = m_AttributeSelection.numberAttributesSelected();
 520  0
     m_ReducedHeader = 
 521  
       new Instances((m_Classifier instanceof WeightedInstancesHandler) ?
 522  
                     newData
 523  
                     : resampledData, 0);
 524  0
     m_selectionTime = (double)(end - start);
 525  0
     m_totalTime = (double)(end2 - start);
 526  0
   }
 527  
 
 528  
   /**
 529  
    * Classifies a given instance after attribute selection
 530  
    *
 531  
    * @param instance the instance to be classified
 532  
    * @return the class distribution
 533  
    * @throws Exception if instance could not be classified
 534  
    * successfully
 535  
    */
 536  
   public double [] distributionForInstance(Instance instance)
 537  
     throws Exception {
 538  
 
 539  
     Instance newInstance;
 540  0
     if (m_AttributeSelection == null) {
 541  
       //      throw new Exception("AttributeSelectedClassifier: No model built yet!");
 542  0
       newInstance = instance;
 543  
     } else {
 544  0
       newInstance = m_AttributeSelection.reduceDimensionality(instance);
 545  
     }
 546  
 
 547  0
     return m_Classifier.distributionForInstance(newInstance);
 548  
   }
 549  
 
 550  
   /**
 551  
    *  Returns the type of graph this classifier
 552  
    *  represents.
 553  
    *  
 554  
    *  @return the type of graph
 555  
    */   
 556  
   public int graphType() {
 557  
     
 558  0
     if (m_Classifier instanceof Drawable)
 559  0
       return ((Drawable)m_Classifier).graphType();
 560  
     else 
 561  0
       return Drawable.NOT_DRAWABLE;
 562  
   }
 563  
 
 564  
   /**
 565  
    * Returns graph describing the classifier (if possible).
 566  
    *
 567  
    * @return the graph of the classifier in dotty format
 568  
    * @throws Exception if the classifier cannot be graphed
 569  
    */
 570  
   public String graph() throws Exception {
 571  
     
 572  0
     if (m_Classifier instanceof Drawable)
 573  0
       return ((Drawable)m_Classifier).graph();
 574  0
     else throw new Exception("Classifier: " + getClassifierSpec()
 575  
                              + " cannot be graphed");
 576  
   }
 577  
 
 578  
   /**
 579  
    * Output a representation of this classifier
 580  
    * 
 581  
    * @return a representation of this classifier
 582  
    */
 583  
   public String toString() {
 584  0
     if (m_AttributeSelection == null) {
 585  0
       return "AttributeSelectedClassifier: No attribute selection possible.\n\n"
 586  
         +m_Classifier.toString();
 587  
     }
 588  
 
 589  0
     StringBuffer result = new StringBuffer();
 590  0
     result.append("AttributeSelectedClassifier:\n\n");
 591  0
     result.append(m_AttributeSelection.toResultsString());
 592  0
     result.append("\n\nHeader of reduced data:\n"+m_ReducedHeader.toString());
 593  0
     result.append("\n\nClassifier Model\n"+m_Classifier.toString());
 594  
 
 595  0
     return result.toString();
 596  
   }
 597  
 
 598  
   /**
 599  
    * Additional measure --- number of attributes selected
 600  
    * @return the number of attributes selected
 601  
    */
 602  
   public double measureNumAttributesSelected() {
 603  0
     return m_numAttributesSelected;
 604  
   }
 605  
 
 606  
   /**
 607  
    * Additional measure --- time taken (milliseconds) to select the attributes
 608  
    * @return the time taken to select attributes
 609  
    */
 610  
   public double measureSelectionTime() {
 611  0
     return m_selectionTime;
 612  
   }
 613  
 
 614  
   /**
 615  
    * Additional measure --- time taken (milliseconds) to select attributes
 616  
    * and build the classifier
 617  
    * @return the total time (select attributes + build classifier)
 618  
    */
 619  
   public double measureTime() {
 620  0
     return m_totalTime;
 621  
   }
 622  
 
 623  
   /**
 624  
    * Returns an enumeration of the additional measure names
 625  
    * @return an enumeration of the measure names
 626  
    */
 627  
   public Enumeration enumerateMeasures() {
 628  0
     Vector newVector = new Vector(3);
 629  0
     newVector.addElement("measureNumAttributesSelected");
 630  0
     newVector.addElement("measureSelectionTime");
 631  0
     newVector.addElement("measureTime");
 632  0
     if (m_Classifier instanceof AdditionalMeasureProducer) {
 633  0
       Enumeration en = ((AdditionalMeasureProducer)m_Classifier).
 634  
         enumerateMeasures();
 635  0
       while (en.hasMoreElements()) {
 636  0
         String mname = (String)en.nextElement();
 637  0
         newVector.addElement(mname);
 638  0
       }
 639  
     }
 640  0
     return newVector.elements();
 641  
   }
 642  
   
 643  
   /**
 644  
    * Returns the value of the named measure
 645  
    * @param additionalMeasureName the name of the measure to query for its value
 646  
    * @return the value of the named measure
 647  
    * @throws IllegalArgumentException if the named measure is not supported
 648  
    */
 649  
   public double getMeasure(String additionalMeasureName) {
 650  0
     if (additionalMeasureName.compareToIgnoreCase("measureNumAttributesSelected") == 0) {
 651  0
       return measureNumAttributesSelected();
 652  0
     } else if (additionalMeasureName.compareToIgnoreCase("measureSelectionTime") == 0) {
 653  0
       return measureSelectionTime();
 654  0
     } else if (additionalMeasureName.compareToIgnoreCase("measureTime") == 0) {
 655  0
       return measureTime();
 656  0
     } else if (m_Classifier instanceof AdditionalMeasureProducer) {
 657  0
       return ((AdditionalMeasureProducer)m_Classifier).
 658  
         getMeasure(additionalMeasureName);
 659  
     } else {
 660  0
       throw new IllegalArgumentException(additionalMeasureName 
 661  
                           + " not supported (AttributeSelectedClassifier)");
 662  
     }
 663  
   }
 664  
   
 665  
   /**
 666  
    * Returns the revision string.
 667  
    * 
 668  
    * @return                the revision
 669  
    */
 670  
   public String getRevision() {
 671  0
     return RevisionUtils.extract("$Revision: 8034 $");
 672  
   }
 673  
 
 674  
   /**
 675  
    * Main method for testing this class.
 676  
    *
 677  
    * @param argv should contain the following arguments:
 678  
    * -t training file [-T test file] [-c class index]
 679  
    */
 680  
   public static void main(String [] argv) {
 681  0
     runClassifier(new AttributeSelectedClassifier(), argv);
 682  0
   }
 683  
 }