METHODS OF ARTIFICIAL INTELLIGENCE
TASK I
Reading data from the file irys.tab, stored in the tab format, using the rseslib library.
http://rsproject.mimuw.edu.pl./components.html
http://logic.mimuw.edu.pl/~rses/
http://rsproject.mimuw.edu.pl/
http://tunedit.org/
http://tunedit.org/challenge/RSCTC-2010-A?m=overview
package javaapplication3;

import java.io.*;
import rseslib.structure.attribute.*;
import rseslib.structure.attribute.formats.HeaderFormatException;
import rseslib.structure.data.formats.DataFormatException;
import rseslib.structure.table.*;
import rseslib.system.*;
import rseslib.system.progress.*;

/**
 * @author student
 */
public class JavaApplication3 {

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) throws DataFormatException {
        try {
            // Read the attribute header from the tab-format file.
            Header hdr = new ArrayHeader(new File("D:/data/irys.tab"));
            // Load the data table; progress is reported on standard output.
            DoubleDataTable table = new ArrayListDoubleDataTable(
                    new File("D:/data/irys.tab"), hdr, new StdOutProgress());
        } catch (HeaderFormatException ex) {
            System.out.println(ex.getMessage());
        } catch (BadHeaderException ex) {
            System.out.println(ex.getMessage());
        } catch (InterruptedException ex) {
            System.out.println(ex.getMessage());
        } catch (IOException ex) {
            System.out.println(ex.getMessage());
        }
    }
}
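As a quick check that irys.tab was loaded correctly, the size of the table can be printed right after the constructor calls. The listing below is only a minimal sketch: the class IrysTableInfo is a hypothetical helper, and the accessors noOfAttr() and noOfObjects() are assumptions about the rseslib Header and DoubleDataTable interfaces, so the calls may need adjusting to the library version used in the lab.

import java.io.*;
import rseslib.structure.attribute.*;
import rseslib.structure.table.*;
import rseslib.system.progress.*;

// Hypothetical helper class, not part of the assignment code.
public class IrysTableInfo {
    public static void main(String[] args) throws Exception {
        Header hdr = new ArrayHeader(new File("D:/data/irys.tab"));
        DoubleDataTable table = new ArrayListDoubleDataTable(
                new File("D:/data/irys.tab"), hdr, new StdOutProgress());
        // Both accessors below are assumed rseslib methods (see the note above).
        System.out.println("Attributes in header: " + hdr.noOfAttr());
        System.out.println("Objects in table: " + table.noOfObjects());
    }
}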
TASK II
Reading data from the file iris.arff, stored in the ARFF format, using the Weka library.
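For reference, an ARFF file starts with a header that names the relation and declares its attributes, followed by a @data section with one comma-separated instance per line. The excerpt below sketches the structure assumed for D:\data\iris.arff; the attribute names follow the iris.arff file shipped with Weka, and the local copy may differ slightly.

@relation iris

@attribute sepallength numeric
@attribute sepalwidth numeric
@attribute petallength numeric
@attribute petalwidth numeric
@attribute class {Iris-setosa,Iris-versicolor,Iris-virginica}

@data
5.1,3.5,1.4,0.2,Iris-setosa
7.0,3.2,4.7,1.4,Iris-versicolor
6.3,3.3,6.0,2.5,Iris-virginica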
import java.io.*;
import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.meta.FilteredClassifier;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Remove;

/**
 * @author student
 */
public class WekaIris { // class name assumed; it is missing in the original listing

    public static void printDatasetINFO(Instances data) {
        System.out.println("Dataset has been read....");
        System.out.println("Number of objects " + data.numInstances());
        System.out.println("Number of attributes " + data.numAttributes());
        System.out.println("Number of classes " + data.numClasses());
    }

    public static void setDatasetClassified(J48 tree) {
        try {
            // load unlabeled data
            Instances unlabeled = new Instances(
                    new BufferedReader(new FileReader("D:\\data\\iris.arff")));
            // set class attribute
            unlabeled.setClassIndex(unlabeled.numAttributes() - 1);
            // create copy
            Instances labeled = new Instances(unlabeled);
            // label instances with the trained tree
            for (int i = 0; i < unlabeled.numInstances(); i++) {
                double clsLabel = tree.classifyInstance(unlabeled.instance(i));
                labeled.instance(i).setClassValue(clsLabel);
            }
            // save labeled data
            BufferedWriter writer = new BufferedWriter(new FileWriter("D:\\data\\iris2.arff"));
            writer.write(labeled.toString());
            writer.newLine();
            writer.flush();
            writer.close();
        } catch (IOException ex) {
            System.out.println(ex.getMessage());
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        }
    }

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) {
        try {
            // read the training data and set the last attribute as the class
            BufferedReader reader1 = new BufferedReader(new FileReader("D:\\data\\iris.arff"));
            Instances data = new Instances(reader1);
            reader1.close();
            data.setClassIndex(data.numAttributes() - 1);

            // read the same file again as a test set
            BufferedReader reader2 = new BufferedReader(new FileReader("D:\\data\\iris.arff"));
            Instances test = new Instances(reader2);
            reader2.close();
            test.setClassIndex(test.numAttributes() - 1);

            printDatasetINFO(data);

            // create a new instance of the SMO scheme and set its options
            // (the configured scheme is not used further in this listing)
            weka.classifiers.functions.SMO scheme = new weka.classifiers.functions.SMO();
            scheme.setOptions(weka.core.Utils.splitOptions("-C 1.0 -L 0.0010 -P 1.0E-12 -N 0 -V -1 -W 1 "
                    + "-K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));

            // remove the first attribute with the Remove filter
            String[] options = new String[2];
            options[0] = "-R"; // "range"
            options[1] = "1";  // first attribute
            Remove remove = new Remove();  // new instance of the filter
            remove.setOptions(options);    // set options
            remove.setInputFormat(data);   // inform the filter about the dataset **AFTER** setting options
            Instances newData = Filter.useFilter(data, remove); // apply the filter
            printDatasetINFO(newData);

            // meta-classifier: an unpruned J48 wrapped in a FilteredClassifier that removes the 1st attribute
            Remove rm = new Remove();
            rm.setAttributeIndices("1");   // remove 1st attribute
            J48 j48 = new J48();
            j48.setUnpruned(true);         // using an unpruned J48
            FilteredClassifier fc = new FilteredClassifier();
            fc.setFilter(rm);
            fc.setClassifier(j48);

            // train and make predictions on the test set
            fc.buildClassifier(data);
            for (int i = 0; i < test.numInstances(); i++) {
                double pred = fc.classifyInstance(test.instance(i));
                System.out.print("ID: " + test.instance(i).value(0));
                System.out.print(", actual: " + test.classAttribute().value((int) test.instance(i).classValue()));
                System.out.println(", predicted: " + test.classAttribute().value((int) pred));
            }

            // build an unpruned J48 tree on the full training data
            options = new String[1];
            options[0] = "-U";        // unpruned tree
            J48 tree = new J48();     // new instance of the tree
            tree.setOptions(options); // set the options
            tree.buildClassifier(data);

            // 10-fold cross-validation of the tree on the filtered data
            Evaluation eval = new Evaluation(newData);
            eval.crossValidateModel(tree, newData, 10, new Random(1));

            // evaluate the tree on the test set and print the summary
            eval = new Evaluation(data);
            eval.evaluateModel(tree, test);
            System.out.println(eval.toSummaryString("\nResults\n======\n", false));

            // evaluate a fresh J48 using command-line style options
            options = new String[2];
            options[0] = "-t";
            options[1] = "D:\\data\\iris.arff";
            System.out.println(Evaluation.evaluateModel(new J48(), options));

            // classify the unlabeled copy of the data and save it to iris2.arff
            setDatasetClassified(tree);
        } catch (IOException ex) {
            System.out.println(ex.getMessage());
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        }
    }
}
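Instead of constructing Instances from a BufferedReader, the ARFF file can also be loaded through Weka's converter layer. The listing below is a minimal alternative sketch using weka.core.converters.ConverterUtils.DataSource; the class LoadIrisWithDataSource is a hypothetical helper, not part of the assignment code, and it assumes the same D:\data\iris.arff path as above.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

// Hypothetical helper class showing the DataSource-based loading path.
public class LoadIrisWithDataSource {
    public static void main(String[] args) throws Exception {
        // DataSource chooses a loader based on the file extension (ARFF here).
        DataSource source = new DataSource("D:\\data\\iris.arff");
        Instances data = source.getDataSet();
        // The class attribute is not set automatically; use the last attribute, as in the listing above.
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1);
        }
        System.out.println("Loaded " + data.numInstances() + " instances with "
                + data.numAttributes() + " attributes.");
    }
}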