package org.dllearner.cli;

import com.google.common.collect.Sets;
import org.dllearner.algorithms.decisiontrees.dsttdt.DSTTDTClassifier;
import org.dllearner.algorithms.decisiontrees.tdt.TDTClassifier;
import org.dllearner.core.*;
import org.dllearner.learningproblems.Heuristics;
import org.dllearner.learningproblems.PosNegLP;
import org.dllearner.learningproblems.PosNegUndLP;
import org.dllearner.learningproblems.PosOnlyLP;
import org.dllearner.utilities.Helper;
import org.dllearner.utilities.owl.ManchesterOWLSyntaxOWLObjectRendererImplExt;
import org.dllearner.utilities.owl.OWLClassExpressionUtils;
import org.dllearner.utilities.statistics.Stat;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLIndividual;
import org.semanticweb.owlapi.model.OWLObjectComplementOf;
import org.semanticweb.owlapi.util.SimpleShortFormProvider;
import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl;

import java.lang.reflect.InvocationTargetException;
import java.text.DecimalFormat;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

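/**
 * Performs k-fold cross-validation for learning problems with positive, negative and
 * (optionally) uncertain-membership examples. In addition to the accuracy and F-measure
 * statistics collected by {@link CrossValidation}, it tracks the match, commission,
 * omission and induction rates used to evaluate ternary classification
 * (see {@link #validate}).
 *
 * Usage sketch (the constructor itself runs the full cross-validation; la, lp and rs
 * are assumed to be already configured components):
 *
 *   new CrossValidation2(la, lp, rs, 10, false);
 */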
public class CrossValidation2 extends CrossValidation {

    DecimalFormat df = new DecimalFormat();
    // commission, omission and induction rates on the training and test sets
    protected Stat commissionTraining = new Stat();
    protected Stat commission = new Stat();
    protected Stat omissionTraining = new Stat();
    protected Stat omission = new Stat();
    protected Stat inductionTraining = new Stat();
    protected Stat induction = new Stat();

    public CrossValidation2() {
        super(); // superclass constructor
    }

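    /**
     * Runs the cross-validation: the examples are shuffled, split into the given number
     * of folds (stratified separately for positive, negative and uncertain examples),
     * and each fold is evaluated via {@link #validate}.
     *
     * @param la the learning algorithm to evaluate
     * @param lp the learning problem (PosNegLP, PosNegUndLP or PosOnlyLP)
     * @param rs the reasoner used to classify individuals
     * @param folds number of folds
     * @param leaveOneOut leave-one-out mode (currently not supported)
     */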
    public CrossValidation2(AbstractCELA la, AbstractClassExpressionLearningProblem lp, final AbstractReasonerComponent rs, int folds, boolean leaveOneOut) {
        super();
        ManchesterOWLSyntaxOWLObjectRendererImplExt renderer = new ManchesterOWLSyntaxOWLObjectRendererImplExt();
        StringRenderer.setRenderer(renderer);
        StringRenderer.setShortFormProvider(new SimpleShortFormProvider());

        // the training and test sets used later on
        List<Set<OWLIndividual>> trainingSetsPos = new LinkedList<>();
        List<Set<OWLIndividual>> trainingSetsNeg = new LinkedList<>();
        List<Set<OWLIndividual>> trainingSetsUnd = new LinkedList<>();
        List<Set<OWLIndividual>> testSetsPos = new LinkedList<>();
        List<Set<OWLIndividual>> testSetsNeg = new LinkedList<>();
        List<Set<OWLIndividual>> testSetsUnd = new LinkedList<>();
        // get the examples and shuffle them
        Set<OWLIndividual> posExamples;
        Set<OWLIndividual> negExamples;
        Set<OWLIndividual> undExamples; // remains empty for learning problems without uncertain examples
        if (lp instanceof PosNegLP) {
            posExamples = ((PosNegLP) lp).getPositiveExamples();
            negExamples = ((PosNegLP) lp).getNegativeExamples();
            undExamples = new TreeSet<>();

            if (lp instanceof PosNegUndLP) {
                undExamples = ((PosNegUndLP) lp).getUncertainExamples();
            }
        } else if (lp instanceof PosOnlyLP) {
            posExamples = ((PosOnlyLP) lp).getPositiveExamples();
            negExamples = new HashSet<>();
            undExamples = new TreeSet<>();
        } else {
            throw new IllegalArgumentException("Only PosNeg and PosOnly learning problems are supported");
        }
        List<OWLIndividual> posExamplesList = new LinkedList<>(posExamples);
        List<OWLIndividual> negExamplesList = new LinkedList<>(negExamples);
        List<OWLIndividual> undExamplesList = new LinkedList<>(undExamples);
        //System.out.println("Undefined membership: "+undExamples);
        Collections.shuffle(posExamplesList, new Random(1));
        Collections.shuffle(negExamplesList, new Random(2));
        if (lp instanceof PosNegUndLP) {
            Collections.shuffle(undExamplesList, new Random(3));
        }

        // The sanity check on the nr. of folds has been removed in order to also
        // support imbalanced distributions of data:
        //      if(!leaveOneOut && (posExamples.size()<folds && negExamples.size()<folds && undExamples.size()<folds)) {
        //          System.out.println("The number of folds is higher than the number of "
        //                  + "positive/negative examples. This can result in empty test sets. Exiting.");
        //          System.exit(0);
        //      }
        if (leaveOneOut) {
            // note that leave-one-out is not identical to k-fold with
            // k = nr. of examples in the current implementation, because
            // with n folds and n examples there is no guarantee that a fold
            // is never empty (this is an implementation issue)
            int nrOfExamples = posExamples.size() + negExamples.size();
            for (int i = 0; i < nrOfExamples; i++) {
                // fold construction for leave-one-out is not implemented yet
            }
            System.out.println("Leave-one-out not supported yet.");
            System.exit(1);
        } else {
            // calculate where to split the sets; note that we split
            // positive and negative examples separately such that the
            // distribution of positive and negative examples remains similar
            // (note that there are better but more complex ways to implement this,
            // which guarantee that the sum of the elements of a fold for pos
            // and neg differs by at most 1 - it can differ by 2 in our implementation,
            // e.g. with 3 folds, 4 pos. examples, 4 neg. examples)
            int[] splitsPos = calculateSplits(posExamples.size(), folds);
            int[] splitsNeg = calculateSplits(negExamples.size(), folds);
            int[] splitsUnd = calculateSplits(undExamples.size(), folds);

            //          System.out.println("<"+posExamples.size());
            //          System.out.println("<"+negExamples.size());
            //          System.out.println("<"+undExamples.size());
            //          System.out.println("---"+splitsPos[0]);
            //          System.out.println("---"+splitsNeg[0]);
            //          System.out.println("---"+splitsUnd[0]);
            // calculating training and test sets
            for (int i = 0; i < folds; i++) {
                Set<OWLIndividual> testPos = getTestingSet(posExamplesList, splitsPos, i);
                Set<OWLIndividual> testNeg = getTestingSet(negExamplesList, splitsNeg, i);
                Set<OWLIndividual> testUnd = getTestingSet(undExamplesList, splitsUnd, i);
                testSetsPos.add(i, testPos);
                testSetsNeg.add(i, testNeg);
                testSetsUnd.add(i, testUnd);
                trainingSetsPos.add(i, getTrainingSet(posExamples, testPos));
                trainingSetsNeg.add(i, getTrainingSet(negExamples, testNeg));
                trainingSetsUnd.add(i, getTrainingSet(undExamples, testUnd));
            }

            //System.out.println("Test set size: "+testSetsPos.size());
        }

        // run the algorithm
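        // In multi-threaded mode each fold gets its own clone of the learning problem
        // and of the learning algorithm (created reflectively via their clone() methods)
        // and is submitted to a fixed-size thread pool; otherwise the folds are
        // evaluated sequentially on the original components.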
        if (multiThreaded && lp instanceof Cloneable && la instanceof Cloneable) {
            // leave one core free, but always use at least one thread
            ExecutorService es = Executors.newFixedThreadPool(Math.max(1, Runtime.getRuntime().availableProcessors() - 1));
            for (int currFold = 0; currFold < folds; currFold++) {
                try {
                    final AbstractClassExpressionLearningProblem lpClone = (AbstractClassExpressionLearningProblem) lp.getClass().getMethod("clone").invoke(lp);
                    final Set<OWLIndividual> trainPos = trainingSetsPos.get(currFold);
                    final Set<OWLIndividual> trainNeg = trainingSetsNeg.get(currFold);

                    final Set<OWLIndividual> testPos = testSetsPos.get(currFold);
                    final Set<OWLIndividual> testNeg = testSetsNeg.get(currFold);
                    final Set<OWLIndividual> trainUnd = trainingSetsUnd.get(currFold);
                    final Set<OWLIndividual> testUnd = testSetsUnd.get(currFold);

                    if (lp instanceof PosNegLP) {
                        ((PosNegLP) lpClone).setPositiveExamples(trainPos);
                        ((PosNegLP) lpClone).setNegativeExamples(trainNeg);
                        if (lp instanceof PosNegUndLP) {
                            ((PosNegUndLP) lpClone).setUncertainExamples(trainUnd);
                        }
                    } else if (lp instanceof PosOnlyLP) {
                        ((PosOnlyLP) lpClone).setPositiveExamples(new TreeSet<>(trainPos));
                    }
                    final AbstractCELA laClone = (AbstractCELA) la.getClass().getMethod("clone").invoke(la);
                    final int i = currFold;

                    es.submit(new Runnable() {

                        @Override
                        public void run() {
                            try {
                                if (lpClone instanceof PosNegUndLP)
                                    validate(laClone, lpClone, rs, i, trainPos, trainNeg, trainUnd, testPos, testNeg, testUnd);
                                else
                                    validate(laClone, lpClone, rs, i, trainPos, trainNeg, null, testPos, testNeg, null);
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                        }
                    });
                } catch (IllegalAccessException | SecurityException | NoSuchMethodException | InvocationTargetException | IllegalArgumentException e) {
                    e.printStackTrace();
                }
            }
            es.shutdown();
            try {
                es.awaitTermination(1, TimeUnit.DAYS);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        else {
            for (int currFold = 0; currFold < folds; currFold++) {
                final Set<OWLIndividual> trainPos = trainingSetsPos.get(currFold);
                final Set<OWLIndividual> trainNeg = trainingSetsNeg.get(currFold);
                final Set<OWLIndividual> trainUnd = trainingSetsUnd.get(currFold);
                final Set<OWLIndividual> testPos = testSetsPos.get(currFold);
                final Set<OWLIndividual> testNeg = testSetsNeg.get(currFold);
                final Set<OWLIndividual> testUnd = testSetsUnd.get(currFold);
                //              System.out.println("testUnd size: "+ trainUnd);
                //              System.exit(0);

                if (lp instanceof PosNegLP) {
                    ((PosNegLP) lp).setPositiveExamples(trainPos);
                    ((PosNegLP) lp).setNegativeExamples(trainNeg);
                    if (lp instanceof PosNegUndLP) {
                        ((PosNegUndLP) lp).setUncertainExamples(trainUnd);
                    }
                } else if (lp instanceof PosOnlyLP) {
                    ((PosOnlyLP) lp).setPositiveExamples(new TreeSet<>(trainPos));
                }

                //System.out.println("Training set negative: "+trainNeg.size());
                //System.out.println("Training set unlabeled: "+trainUnd.size());
                validate(la, lp, rs, currFold, trainPos, trainNeg, trainUnd, testPos, testNeg, testUnd);
            }
        }

        outputWriter("");
        outputWriter("Finished " + folds + "-fold cross-validation.");
        outputWriter("runtime: " + statOutput(df, runtime, "s"));
        outputWriter("length: " + statOutput(df, length, ""));
        outputWriter("F-Measure on training set: " + statOutput(df, fMeasureTraining, "%"));
        outputWriter("F-Measure: " + statOutput(df, fMeasure, "%"));
        outputWriter("Match rate on training set: " + statOutput(df, accuracyTraining, "%"));
        outputWriter("Match rate: " + statOutput(df, accuracy, "%"));
        outputWriter("Commission rate: " + statOutput(df, commission, "%"));
        outputWriter("Omission rate: " + statOutput(df, omission, "%"));
        outputWriter("Induction rate: " + statOutput(df, induction, "%"));
    }

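    /**
     * Trains the algorithm on one fold and updates the training and test statistics:
     * match (accuracy), commission (a definite example assigned the opposite definite
     * label), omission (a definite example left uncertain), induction (an uncertain
     * example given a definite label), F-measure, concept length and runtime.
     */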
    private void validate(AbstractCELA la, AbstractClassExpressionLearningProblem lp, AbstractReasonerComponent rs,
            int currFold, Set<OWLIndividual> trainPos, Set<OWLIndividual> trainNeg, Set<OWLIndividual> trainUnd, Set<OWLIndividual> testPos, Set<OWLIndividual> testNeg, Set<OWLIndividual> testUnd) {
        //System.out.println("Validation starting");
        // trainUnd/testUnd may be null when the learning problem has no uncertain
        // examples (see the multi-threaded branch above); normalise them to empty sets
        if (trainUnd == null) trainUnd = new TreeSet<>();
        if (testUnd == null) testUnd = new TreeSet<>();
        Set<String> pos = Helper.getStringSet(trainPos);
        Set<String> neg = Helper.getStringSet(trainNeg);
        Set<String> und = Helper.getStringSet(trainUnd);
        String output = "";
        TreeSet<String> treeSetPos = new TreeSet<>(pos);
        output += "+" + treeSetPos + "\n";
        TreeSet<String> treeSetNeg = new TreeSet<>(neg);
        output += "-" + treeSetNeg + "\n";
        TreeSet<String> treeSetUnd = new TreeSet<>(und);
        output += "?" + treeSetUnd + "\n";
        //System.out.printf("Learning algorithm preparation: %d %d %d \n", treeSetPos.size(), treeSetNeg.size(), treeSetUnd.size());
        try {
            lp.init();
            la.setLearningProblem(lp);
            la.init();
            //System.out.println("setting learning problem");
        } catch (ComponentInitException e) {
            e.printStackTrace();
        }

        long algorithmStartTime = System.nanoTime();
        la.start();
        long algorithmDuration = System.nanoTime() - algorithmStartTime;
        runtime.addNumber(algorithmDuration / (double) 1000000000);

        int trainingCorrectPosClassified, trainingCorrectNegClassified, trainingCorrectUndClassified, trainingCorrectExamples;
        int trainingSize;
        double trainingAccuracy;
        int negAsPosTraining;
        int posAsNegTraining; // commission cases

        int undAsPosTraining;
        int undAsNegTraining; // induction cases

        int posAsUndTraining; // omission cases
        int negAsUndTraining;

        int commissions;
        double trainingCommission;
        int inductions;
        double trainingInduction;
        int omissions;
        double trainingOmission;

        int negAsPos;
        int posAsNeg; // commission cases
        int undAsPos;
        int undAsNeg; // induction cases

        int posAsUnd; // omission cases
        int negAsUnd;

        double currCommission;
        double currInduction;
        double currOmission;

        int correctPosClassified; //getCorrectPosClassified(rs, concept, testPos);
        int correctNegClassified;
        int correctUndClassified;
        int correctExamples;

        double currAccuracy;

        OWLClassExpression concept = la.getCurrentlyBestDescription();
        int testsize = testPos.size() + testNeg.size() + testUnd.size();
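        // Two evaluation paths: for standard concept learners the currently best class
        // expression is evaluated through reasoner instance checks on the concept and
        // its complement; for the terminological decision-tree classifiers (TDT and
        // DST-TDT) the induced tree model is queried directly.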
        if (!(la instanceof DSTTDTClassifier) && !(la instanceof TDTClassifier)) {
            //System.out.println("Training completed: "+ concept);

            Set<OWLIndividual> tmp = rs.hasType(concept, testPos);
            Set<OWLIndividual> tmp2 = Sets.difference(testPos, tmp);
            Set<OWLIndividual> tmp3 = rs.hasType(concept, testNeg);

            // calculate training accuracies
            trainingCorrectPosClassified = getCorrectPosClassified(rs, concept, trainPos);
            trainingCorrectNegClassified = getCorrectNegClassified(rs, concept, trainNeg);
            trainingCorrectUndClassified = getCorrectUndClassified(rs, concept, trainUnd);
            trainingCorrectExamples = trainingCorrectUndClassified + trainingCorrectPosClassified + trainingCorrectNegClassified;
            trainingSize = trainPos.size() + trainNeg.size() + trainUnd.size();
            trainingAccuracy = 100 * ((double) trainingCorrectExamples / trainingSize);

            //System.out.println("Training Correct Examples: "+ trainingCorrectExamples + " Size: " + trainingSize);
            accuracyTraining.addNumber(trainingAccuracy); // in a ternary setting this is the match rate

            // compute training match (accuracy), commission, omission and induction
            OWLDataFactory factory = new OWLDataFactoryImpl(); // data factory used to derive the complement of the concept description
            negAsPosTraining = rs.hasType(concept, trainNeg).size();
            posAsNegTraining = rs.hasType(factory.getOWLObjectComplementOf(concept), trainPos).size(); // commission cases

            undAsPosTraining = rs.hasType(concept, trainUnd).size();
            undAsNegTraining = rs.hasType(factory.getOWLObjectComplementOf(concept), trainUnd).size(); // induction cases

            posAsUndTraining = trainPos.size() - trainingCorrectPosClassified - posAsNegTraining; // omission cases
            negAsUndTraining = trainNeg.size() - trainingCorrectNegClassified - negAsPosTraining;

            commissions = negAsPosTraining + posAsNegTraining;
            //System.out.println("Training commissions: "+ commissions);
            trainingCommission = 100 * ((double) commissions / trainingSize);
            inductions = undAsPosTraining + undAsNegTraining;

            trainingInduction = 100 * ((double) inductions / trainingSize);
            //System.out.println("Training inductions: "+ trainingInduction);
            omissions = posAsUndTraining + negAsUndTraining;

            trainingOmission = 100 * ((double) omissions / trainingSize);
            //System.out.println("Training omissions: "+ trainingOmission);

            commissionTraining.addNumber(trainingCommission);
            inductionTraining.addNumber(trainingInduction);
            omissionTraining.addNumber(trainingOmission);

            // calculate test accuracies
            correctPosClassified = getCorrectPosClassified(rs, concept, testPos);
            correctNegClassified = getCorrectNegClassified(rs, concept, testNeg);
            correctUndClassified = getCorrectUndClassified(rs, concept, testUnd);
            correctExamples = correctUndClassified + correctPosClassified + correctNegClassified;

            currAccuracy = 100 * ((double) correctExamples / testsize);
            accuracy.addNumber(currAccuracy);

            // commission, omission and induction on the test set
            negAsPos = rs.hasType(concept, testNeg).size();

            posAsNeg = rs.hasType(factory.getOWLObjectComplementOf(concept), testPos).size(); // commission cases
            undAsPos = rs.hasType(concept, testUnd).size();
            undAsNeg = rs.hasType(factory.getOWLObjectComplementOf(concept), testUnd).size(); // induction cases

            posAsUnd = testPos.size() - correctPosClassified - posAsNeg; // omission cases
            negAsUnd = testNeg.size() - correctNegClassified - negAsPos;

            currCommission = 100 * ((double) (negAsPos + posAsNeg) / testsize);
            currInduction = 100 * ((double) (undAsPos + undAsNeg) / testsize);
            currOmission = 100 * ((double) (posAsUnd + negAsUnd) / testsize);

            commission.addNumber(currCommission);
            omission.addNumber(currOmission);
            induction.addNumber(currInduction);

            // calculate training F-Score
            double precisionTraining = trainingCorrectPosClassified + negAsPosTraining == 0 ? 0 : trainingCorrectPosClassified / (double) (trainingCorrectPosClassified + negAsPosTraining);
            double recallTraining = trainingCorrectPosClassified / (double) trainPos.size();
            System.out.println(precisionTraining + "----" + recallTraining);
            //          System.exit(1);
            fMeasureTraining.addNumber(100 * Heuristics.getFScore(recallTraining, precisionTraining));

            // calculate test F-Score
            double precision = correctPosClassified + negAsPos == 0 ? 0 : correctPosClassified / (double) (correctPosClassified + negAsPos);
            double recall = correctPosClassified / (double) testPos.size();
            //          System.out.println(precision);System.out.println(recall);
            fMeasure.addNumber(100 * Heuristics.getFScore(recall, precision));
            length.addNumber(OWLClassExpressionUtils.getLength(concept));
            output += "test set errors pos: " + tmp2 + "\n";
            output += "test set errors neg: " + tmp3 + "\n";

        } else {

            //          DSTTDTClassifier tdt = (DSTTDTClassifier) la;

            System.out.println("Training");
            trainingCorrectPosClassified = getCorrectClassifications(1, trainPos, la);

            trainingCorrectNegClassified = getCorrectClassifications(-1, trainNeg, la);
            trainingCorrectUndClassified = getCorrectClassifications(0, trainUnd, la);
            trainingCorrectExamples = trainingCorrectUndClassified + trainingCorrectPosClassified + trainingCorrectNegClassified;
            trainingSize = trainPos.size() + trainNeg.size() + trainUnd.size();
            trainingAccuracy = 100 * ((double) trainingCorrectExamples / trainingSize);

            accuracyTraining.addNumber(trainingAccuracy);
            negAsPosTraining = getWrongClassification(trainNeg, la);

            posAsNegTraining = getWrongClassificationNeg(trainPos, la); // commission cases

            posAsUndTraining = getOmittedClassification(1, trainPos, la); // omission cases

            negAsUndTraining = getOmittedClassification(-1, trainNeg, la);

            //System.out.println("Training "+posAsUndTraining+ "-----------"+ negAsUndTraining);
            undAsPosTraining = getInductionClassification(trainUnd, la); // positive and negative induction
            undAsNegTraining = 0; // already counted by getInductionClassification above

            commissions = negAsPosTraining + posAsNegTraining;
            //System.out.println("Training commissions: "+ commissions);
            trainingCommission = 100 * ((double) commissions / trainingSize);
            inductions = undAsPosTraining + undAsNegTraining;

            trainingInduction = 100 * ((double) inductions / trainingSize);
            //System.out.println("Training inductions: "+ trainingInduction);
            omissions = posAsUndTraining + negAsUndTraining;

            trainingOmission = 100 * ((double) omissions / trainingSize);
            //System.out.println("Training omissions: "+ trainingOmission);

            commissionTraining.addNumber(trainingCommission);
            inductionTraining.addNumber(trainingInduction);
            omissionTraining.addNumber(trainingOmission);

            correctPosClassified = getCorrectClassifications(1, testPos, la);
            correctNegClassified = getCorrectClassifications(-1, testNeg, la);
            correctUndClassified = getCorrectClassifications(0, testUnd, la);
            correctExamples = correctUndClassified + correctPosClassified + correctNegClassified;

            //          System.out.println("Correct p: "+ correctPosClassified +" n: "+ correctNegClassified+ " u: "+ correctUndClassified);
            //          System.out.println("        p: "+ testPos.size() +" n: "+ testNeg.size()+ " u: "+ testUnd.size());
            //          System.out.println("Correct examples: "+ correctExamples +" test size: "+ testsize);
            currAccuracy = 100 * ((double) correctExamples / testsize);
            accuracy.addNumber(currAccuracy);

            // commission, omission and induction on the test set
            negAsPos = getWrongClassification(testNeg, la);
            posAsNeg = getWrongClassificationNeg(testPos, la); // commission cases

            undAsPos = getInductionClassification(testUnd, la); // positive and negative induction
            //System.out.println("trainUnd size: "+ trainUnd.size());
            undAsNeg = 0; // already counted by getInductionClassification above

            posAsUnd = getOmittedClassification(1, testPos, la); // omission cases
            negAsUnd = getOmittedClassification(-1, testNeg, la);
            //          System.out.println("Test:  Omissions: "+ (posAsUnd+ negAsUnd)+ "/"+ testsize);
            //          System.out.println("Match: "+ (correctPosClassified+correctNegClassified+correctUndClassified)+ "/"+ testsize);
            //          System.out.println("       Commissions: "+(negAsPos+posAsNeg)+ "/"+ testsize);
            //          System.out.println("       Induction: "+(undAsPos+undAsNeg)+ "/"+ testsize);
            currCommission = 100 * (((double) (negAsPos + posAsNeg)) / testsize);
            currInduction = 100 * (((double) (undAsPos + undAsNeg)) / testsize);
            currOmission = 100 * (((double) (posAsUnd + negAsUnd)) / testsize);
            //          System.out.println("M: "+ currAccuracy);
            //          System.out.println("C: "+ currCommission);
            //          System.out.println("O: "+ currOmission);
            //          System.out.println("I: "+ currInduction);
            //          System.exit(1);
            commission.addNumber(currCommission);
            omission.addNumber(currOmission);
            induction.addNumber(currInduction);

            // calculate training F-Score
            double precisionTraining = trainingCorrectPosClassified + negAsPosTraining == 0 ? 0 : trainingCorrectPosClassified / (double) (trainingCorrectPosClassified + negAsPosTraining);
            double recallTraining = trainingCorrectPosClassified / (double) trainPos.size();
            System.out.println(precisionTraining + "----" + recallTraining);

            fMeasureTraining.addNumber(100 * Heuristics.getFScore(recallTraining, precisionTraining));

            // calculate test F-Score
            double precision = correctPosClassified + negAsPos == 0 ? 0 : correctPosClassified / (double) (correctPosClassified + negAsPos);
            double recall = correctPosClassified / (double) testPos.size();
            //          System.out.println(precision);System.out.println(recall);
            fMeasure.addNumber(100 * Heuristics.getFScore(recall, precision));
            length.addNumber(OWLClassExpressionUtils.getLength(concept));

        }

        output += "fold " + currFold + ":" + "\n";
        output += "  training: " + pos.size() + " positive, " + neg.size() + " negative and " + und.size() + " uncertain examples\n";
        output += "  testing: " + correctPosClassified + "/" + testPos.size() + " correct positives, "
                + correctNegClassified + "/" + testNeg.size() + " correct negatives and " + correctUndClassified + "/" + testUnd.size() + " correct uncertain\n";
        output += "  concept: " + concept.toString().replace("\n", " ") + "\n";
        output += "  match: " + df.format(currAccuracy) + "% (" + df.format(trainingAccuracy) + "% on training set)" + "\n";
        output += "  commission: " + df.format(currCommission) + "% (" + df.format(trainingCommission) + "% on training set)" + "\n";
        output += "  omission: " + df.format(currOmission) + "% (" + df.format(trainingOmission) + "% on training set)" + "\n";
        output += "  induction: " + df.format(currInduction) + "% (" + df.format(trainingInduction) + "% on training set)" + "\n";
        output += "  length: " + df.format(OWLClassExpressionUtils.getLength(concept)) + "\n";
        output += "  runtime: " + df.format(algorithmDuration / (double) 1000000000) + "s" + "\n";

        System.out.println(output);

        outputWriter(output);
    }

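    /**
     * Counts the examples in the given set for which the decision-tree classifier
     * predicts the expected label (+1 = positive, -1 = negative, 0 = uncertain).
     */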
    private int getCorrectClassifications(int groundtruth, Set<OWLIndividual> set, AbstractCELA la) {
        int trainingCorrectClassified = 0;
        for (OWLIndividual indTestEx : set) {
            int label = 0;
            if (la instanceof DSTTDTClassifier)
                label = ((DSTTDTClassifier) la).classifyExamplesDST(indTestEx, ((DSTTDTClassifier) la).getCurrentmodel());
            else if (la instanceof TDTClassifier) {
                label = ((TDTClassifier) la).classifyExample(indTestEx, ((TDTClassifier) la).getCurrentmodel());
            }
            //System.out.println(" GetCorrectClassified Label: "+label);
            if (label == groundtruth) {
                //System.out.println("\t Ground truth "+groundtruth+" Predicted "+ label+ ": matched");
                trainingCorrectClassified++;
            }

        }
        return trainingCorrectClassified;
    }

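    /**
     * Counts the examples (expected to be negative) that the decision-tree classifier
     * labels as positive, i.e. one half of the commission cases.
     */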
    private int getWrongClassification(Set<OWLIndividual> set, AbstractCELA la) {
        int trainingWrongClassified = 0;
        for (OWLIndividual indTestEx : set) {
            int label = 0;
            if (la instanceof DSTTDTClassifier) {
                label = ((DSTTDTClassifier) la).classifyExamplesDST(indTestEx, ((DSTTDTClassifier) la).getCurrentmodel());
                //System.out.println("---->"+label);
            } else if (la instanceof TDTClassifier) {
                label = ((TDTClassifier) la).classifyExample(indTestEx, ((TDTClassifier) la).getCurrentmodel());
                //System.out.println("****>"+label);
            }
            //System.out.println("\t Ground truth -1, predicted "+ label);
            if (label == +1) {
                trainingWrongClassified++;
            }

        }
        //System.out.println("# errors: "+ trainingWrongClassified);
        return trainingWrongClassified;
    }

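    /**
     * Counts the examples (expected to be positive) that the decision-tree classifier
     * labels as negative, i.e. the other half of the commission cases.
     */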
    private int getWrongClassificationNeg(Set<OWLIndividual> set, AbstractCELA la) {
        int trainingWrongClassified = 0;
        for (OWLIndividual indTestEx : set) {
            int label = 0;

            if (la instanceof DSTTDTClassifier) {
                label = ((DSTTDTClassifier) la).classifyExamplesDST(indTestEx, ((DSTTDTClassifier) la).getCurrentmodel());
                //System.out.println("---->"+label);
            } else if (la instanceof TDTClassifier) {
                label = ((TDTClassifier) la).classifyExample(indTestEx, ((TDTClassifier) la).getCurrentmodel());
                //System.out.println("*****>"+label);
            }
            //System.out.println("label: "+label +" ground truth +1");
            if (label == -1) {
                //System.out.println("\t Ground truth +1, predicted "+ label+ ": committed");
                trainingWrongClassified++;
            }

        }

        System.out.println("# errors: " + trainingWrongClassified);
        return trainingWrongClassified;
    }

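    /**
     * Counts omission cases: examples with a definite ground-truth label
     * (groundtruth != 0) that the decision-tree classifier leaves uncertain (0).
     */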
    private int getOmittedClassification(int groundtruth, Set<OWLIndividual> set, AbstractCELA la) {
        //System.out.println("Groundtruth: "+ groundtruth);
        int trainingWrongClassified = 0;
        for (OWLIndividual indTestEx : set) {
            int label = 0;
            if (la instanceof DSTTDTClassifier) {
                label = ((DSTTDTClassifier) la).classifyExamplesDST(indTestEx, ((DSTTDTClassifier) la).getCurrentmodel());
            } else if (la instanceof TDTClassifier) {
                label = ((TDTClassifier) la).classifyExample(indTestEx, ((TDTClassifier) la).getCurrentmodel());
            }
            //System.out.println("Label: "+ label +" ground truth: "+ groundtruth);
            if ((label == 0) && (groundtruth != 0))
                trainingWrongClassified++;
        }

        //System.out.println("Omissions: "+ trainingWrongClassified);
        return trainingWrongClassified;
    }

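    /**
     * Counts induction cases: examples with uncertain membership to which the
     * decision-tree classifier assigns a definite label (positive or negative).
     */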
    private int getInductionClassification(Set<OWLIndividual> set, AbstractCELA la) {
        int trainingWrongClassified = 0;
        for (OWLIndividual indTestEx : set) {
            int label = 0;
            if (la instanceof DSTTDTClassifier)
                label = ((DSTTDTClassifier) la).classifyExamplesDST(indTestEx, ((DSTTDTClassifier) la).getCurrentmodel());
            else if (la instanceof TDTClassifier)
                label = ((TDTClassifier) la).classifyExample(indTestEx, ((TDTClassifier) la).getCurrentmodel());
            if (label != 0)
                trainingWrongClassified++;
        }
        //System.out.println("****Inductions: "+trainingWrongClassified +"/"+ set.size());
        return trainingWrongClassified;
    }

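    /**
     * Counts the test individuals that are instances of neither the concept nor its
     * complement, i.e. whose membership remains genuinely undetermined.
     */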
    private int getCorrectUndClassified(AbstractReasonerComponent rs, OWLClassExpression concept, Set<OWLIndividual> testUnd) {
        OWLDataFactory df = new OWLDataFactoryImpl();
        OWLObjectComplementOf complementOfConcept = df.getOWLObjectComplementOf(concept);
        int nOfUnc = 0;
        for (OWLIndividual ind : testUnd) {
            if (!rs.hasType(concept, ind) && !rs.hasType(complementOfConcept, ind))
                nOfUnc++;
        }

        return nOfUnc;
    }

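    /**
     * Unlike the superclass version, a negative example only counts as correctly
     * classified if it is an instance of the explicit complement of the concept,
     * so that open-world semantics is handled explicitly.
     */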
    @Override
    public int getCorrectNegClassified(AbstractReasonerComponent rs, OWLClassExpression concept, Set<OWLIndividual> testSetNeg) {
        // deal explicitly with the Open World Assumption
        OWLDataFactory df = new OWLDataFactoryImpl();
        OWLObjectComplementOf complementOfConcept = df.getOWLObjectComplementOf(concept); // the explicit complement, if it exists
        return (rs.hasType(complementOfConcept, testSetNeg)).size();
    }

}