Changeset 9945:ba7e22d4ebc5 in orange


Timestamp:     02/07/12 19:26:14 (2 years ago)
Author:        markotoplak
Branch:        default
rebase_source: 42219203d685928c8bb777610c858b7cc732a968
Message:       Modernized some example scripts.
Location:      docs/reference/rst/code
Files:         1 deleted, 10 edited

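Across the edited scripts the change follows one pattern: the flat orange, orngTree, orngTest, and orngStat modules are replaced by the hierarchical Orange.* namespace. The following is a minimal sketch of that mapping (Orange 2.5, Python 2), assembled only from calls that appear in the diffs below; it is an illustration of the pattern, not part of the changeset itself.

    import Orange

    # was: orange.BayesLearner / orange.MajorityLearner
    learners = [Orange.classification.bayes.NaiveLearner(name="bayes"),
                Orange.classification.majority.MajorityLearner(name="majrty")]

    voting = Orange.data.Table("voting")                                # was orange.ExampleTable("voting")
    res = Orange.evaluation.testing.cross_validation(learners, voting)  # was orngTest.crossValidation(...)
    CAs = Orange.evaluation.scoring.CA(res)                             # was orngStat.CA(res)
    print "CA: bayes %.3f, majority %.3f" % tuple(CAs)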
  • docs/reference/rst/code/datatable1.py

    r9927 → r9945
    - # Description: Shows how to construct an orange.ExampleTable out of nothing
    + # Description: Shows how to construct an Orange.data.Table out of nothing
      # Category:    basic classes
      # Classes:     ExampleTable, Domain
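The hunk above covers only the updated header comment; the body of datatable1.py builds a table from scratch. A minimal sketch of such a construction in the new namespace, assuming the Orange 2.5 names Orange.feature.Continuous, Orange.feature.Discrete, Orange.data.Domain, and Orange.data.Instance:

    import Orange

    # Two continuous features and a discrete class variable make up the domain.
    features = [Orange.feature.Continuous("x"), Orange.feature.Continuous("y")]
    class_var = Orange.feature.Discrete("class", values=["a", "b"])
    domain = Orange.data.Domain(features, class_var)

    # An empty table over that domain, filled one instance at a time.
    table = Orange.data.Table(domain)
    table.append(Orange.data.Instance(domain, [0.1, 0.2, "a"]))
    table.append(Orange.data.Instance(domain, [0.4, 0.7, "b"]))
    print len(table), "instances,", len(domain.attributes), "features"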
  • docs/reference/rst/code/ensemble-forest-measure.py

    r9638 → r9945
          #call by attribute index
          imp0 = measure(0, iris)
    -     #call by orange.Variable
    +     #call with a Descriptor
          imp1 = measure(iris.domain.attributes[1], iris)
          print "first: %0.2f, second: %0.2f\n" % (imp0, imp1)
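The hunk shows only the two calling conventions; the scorer itself (measure) is constructed earlier in the script. A short sketch of the same by-index and by-Descriptor calls, here using the Orange.feature.scoring.Relief scorer that appears elsewhere in this changeset rather than the forest-based scorer of the original script:

    import Orange

    iris = Orange.data.Table("iris")
    measure = Orange.feature.scoring.Relief()

    # call by attribute index
    imp0 = measure(0, iris)
    # call with a Descriptor
    imp1 = measure(iris.domain.attributes[1], iris)
    print "first: %0.2f, second: %0.2f" % (imp0, imp1)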
  • docs/reference/rst/code/imputation-complex.py

    r9638 → r9945
      def compute_span(ex, rw):
          if ex["TYPE"] == "WOOD" or ex["PURPOSE"] == "WALK":
    -         return orange.Value(span_var, "SHORT")
    +         return Orange.data.Value(span_var, "SHORT")
          else:
    -         return orange.Value(span_var, "MEDIUM")
    +         return Orange.data.Value(span_var, "MEDIUM")

      imputer.models[bridges.domain.index("SPAN")] = compute_span
  • docs/reference/rst/code/lookup-table.py

    r9927 → r9945
    - # Description: Shows how to construct an orange.ClassifierFromExampleTable
    + # Description: Shows how to construct an Orange.classification.lookup.LookupLearner
      # Category:    classification, lookup classifiers, constructive induction, feature construction
      # Classes:     ClassifierByExampleTable, LookupLearner
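As with datatable1.py, only the header comment changes here. A hedged sketch of how the modernized learner might be invoked, assuming Orange.classification.lookup.LookupLearner behaves like other Orange learners and accepts a data table directly; the dataset name is purely illustrative:

    import Orange

    # Assumed dataset; the actual script may restrict the domain to a few features first.
    monks = Orange.data.Table("monks-1")
    classifier = Orange.classification.lookup.LookupLearner(monks)
    print classifier(monks[0])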
  • docs/reference/rst/code/scoring-relief-caching.py

    r9823 → r9945
      # Referenced:  MeasureAttribute.htm

    - import orange
    - iris = orange.ExampleTable("iris")
    + import Orange
    + iris = Orange.data.Table("iris")

    - r1 = orange.MeasureAttribute_relief()
    - r2 = orange.MeasureAttribute_relief(check_cached_data = False)
    + r1 = Orange.feature.scoring.Relief()
    + r2 = Orange.feature.scoring.Relief(check_cached_data = False)

      print "%.3f\t%.3f" % (r1(0, iris), r2(0, iris))
  • docs/reference/rst/code/statExample0.py

    r9372 → r9945
    - import orange, orngTest, orngTree
    + import Orange

    - learners = [orange.BayesLearner(name = "bayes"),
    -             orngTree.TreeLearner(name="tree"),
    -             orange.MajorityLearner(name="majrty")]
    + learners = [ Orange.classification.bayes.NaiveLearner(name = "bayes"),
    +              Orange.classification.tree.TreeLearner(name="tree"),
    +              Orange.classification.majority.MajorityLearner(name="majrty")]

    - voting = orange.ExampleTable("voting")
    - res = orngTest.crossValidation(learners, voting)
    + voting = Orange.data.Table("voting")
    + res = Orange.evaluation.testing.cross_validation(learners, voting)

    - vehicle = orange.ExampleTable("vehicle")
    - resVeh = orngTest.crossValidation(learners, vehicle)
    + vehicle = Orange.data.Table("vehicle")
    + resVeh = Orange.evaluation.testing.cross_validation(learners, vehicle)
  • docs/reference/rst/code/statExample1.py

    r9372 → r9945
    - import orange, orngTest, orngTree
    + import Orange

    - learners = [orange.BayesLearner(name = "bayes"),
    -             orngTree.TreeLearner(name="tree"),
    -             orange.MajorityLearner(name="majrty")]
    + learners = [ Orange.classification.bayes.NaiveLearner(name = "bayes"),
    +              Orange.classification.tree.TreeLearner(name="tree"),
    +              Orange.classification.majority.MajorityLearner(name="majrty")]

    - voting = orange.ExampleTable("voting")
    - res = orngTest.crossValidation(learners, voting)
    + voting = Orange.data.Table("voting")
    + res = Orange.evaluation.testing.cross_validation(learners, voting)

    - vehicle = orange.ExampleTable("vehicle")
    - resVeh = orngTest.crossValidation(learners, vehicle)
    + vehicle = Orange.data.Table("vehicle")
    + resVeh = Orange.evaluation.testing.cross_validation(learners, vehicle)

      import orngStat

    - CAs = orngStat.CA(res)
    - APs = orngStat.AP(res)
    - Briers = orngStat.BrierScore(res)
    - ISs = orngStat.IS(res)
    + CAs = Orange.evaluation.scoring.CA(res)
    + APs = Orange.evaluation.scoring.AP(res)
    + Briers = Orange.evaluation.scoring.Brier_score(res)
    + ISs = Orange.evaluation.scoring.IS(res)

      print
  • docs/reference/rst/code/statExamples.py

    r9372 → r9945
      # Referenced:  orngStat.htm

    - import orange, orngTest, orngTree
    + import Orange

    - learners = [orange.BayesLearner(name = "bayes"),
    -             orngTree.TreeLearner(name="tree"),
    -             orange.MajorityLearner(name="majrty")]
    + learners = [ Orange.classification.bayes.NaiveLearner(name = "bayes"),
    +              Orange.classification.tree.TreeLearner(name="tree"),
    +              Orange.classification.majority.MajorityLearner(name="majrty")]

    - voting = orange.ExampleTable("voting")
    - res = orngTest.crossValidation(learners, voting)
    + voting = Orange.data.Table("voting")
    + res = Orange.evaluation.testing.cross_validation(learners, voting)

    - vehicle = orange.ExampleTable("vehicle")
    - resVeh = orngTest.crossValidation(learners, vehicle)
    + vehicle = Orange.data.Table("vehicle")
    + resVeh = Orange.evaluation.testing.cross_validation(learners, vehicle)

    - import orngStat
    + import Orange.evaluation.scoring

    - CAs = orngStat.CA(res)
    - APs = orngStat.AP(res)
    - Briers = orngStat.BrierScore(res)
    - ISs = orngStat.IS(res)
    + CAs = Orange.evaluation.scoring.CA(res)
    + APs = Orange.evaluation.scoring.AP(res)
    + Briers = Orange.evaluation.scoring.Brier_score(res)
    + ISs = Orange.evaluation.scoring.IS(res)

      print
    …


    - CAs = orngStat.CA(res, reportSE=True)
    - APs = orngStat.AP(res, reportSE=True)
    - Briers = orngStat.BrierScore(res, reportSE=True)
    - ISs = orngStat.IS(res, reportSE=True)
    + CAs = Orange.evaluation.scoring.CA(res, reportSE=True)
    + APs = Orange.evaluation.scoring.AP(res, reportSE=True)
    + Briers = Orange.evaluation.scoring.Brier_score(res, reportSE=True)
    + ISs = Orange.evaluation.scoring.IS(res, reportSE=True)

      print
    …

      print
    - cm = orngStat.confusionMatrices(res)[0]
    + cm = Orange.evaluation.scoring.confusion_matrices(res)[0]
      print "Confusion matrix for naive Bayes:"
      print "TP: %i, FP: %i, FN: %s, TN: %i" % (cm.TP, cm.FP, cm.FN, cm.TN)

      print
    - cm = orngStat.confusionMatrices(res, cutoff=0.2)[0]
    + cm = Orange.evaluation.scoring.confusion_matrices(res, cutoff=0.2)[0]
      print "Confusion matrix for naive Bayes:"
      print "TP: %i, FP: %i, FN: %s, TN: %i" % (cm.TP, cm.FP, cm.FN, cm.TN)

      print
    - cm = orngStat.confusionMatrices(resVeh, vehicle.domain.classVar.values.index("van"))[0]
    + cm = Orange.evaluation.scoring.confusion_matrices(resVeh, vehicle.domain.class_var.values.index("van"))[0]
      print "Confusion matrix for naive Bayes for 'van':"
      print "TP: %i, FP: %i, FN: %s, TN: %i" % (cm.TP, cm.FP, cm.FN, cm.TN)

      print
    - cm = orngStat.confusionMatrices(resVeh, vehicle.domain.classVar.values.index("opel"))[0]
    + cm = Orange.evaluation.scoring.confusion_matrices(resVeh, vehicle.domain.class_var.values.index("opel"))[0]
      print "Confusion matrix for naive Bayes for 'opel':"
      print "TP: %i, FP: %i, FN: %s, TN: %i" % (cm.TP, cm.FP, cm.FN, cm.TN)

      print
    - cm = orngStat.confusionMatrices(resVeh)[0]
    - classes = vehicle.domain.classVar.values
    + cm = Orange.evaluation.scoring.confusion_matrices(resVeh)[0]
    + classes = vehicle.domain.class_var.values
      print "\t"+"\t".join(classes)
      for className, classConfusions in zip(classes, cm):
          print ("%s" + ("\t%i" * len(classes))) % ((className, ) + tuple(classConfusions))

    - cm = orngStat.confusionMatrices(res)
    + cm = Orange.evaluation.scoring.confusion_matrices(res)
      print
      print "Sensitivity and specificity for 'voting'"
      print "method\tsens\tspec"
      for l in range(len(learners)):
    -     print "%s\t%5.3f\t%5.3f" % (learners[l].name, orngStat.sens(cm[l]), orngStat.spec(cm[l]))
    +     print "%s\t%5.3f\t%5.3f" % (learners[l].name, Orange.evaluation.scoring.sens(cm[l]), Orange.evaluation.scoring.spec(cm[l]))

    - cm = orngStat.confusionMatrices(resVeh, vehicle.domain.classVar.values.index("van"))
    + cm = Orange.evaluation.scoring.confusion_matrices(resVeh, vehicle.domain.class_var.values.index("van"))
      print
      print "Sensitivity and specificity for 'vehicle=van'"
      print "method\tsens\tspec"
      for l in range(len(learners)):
    -     print "%s\t%5.3f\t%5.3f" % (learners[l].name, orngStat.sens(cm[l]), orngStat.spec(cm[l]))
    +     print "%s\t%5.3f\t%5.3f" % (learners[l].name, Orange.evaluation.scoring.sens(cm[l]), Orange.evaluation.scoring.spec(cm[l]))

      print
      print "AUC (voting)"

    - AUCs = orngStat.AUC(res)
    + AUCs = Orange.evaluation.scoring.AUC(res)
      for l in range(len(learners)):
          print "%10s: %5.3f" % (learners[l].name, AUCs[l])
    …
      print "AUC for vehicle using weighted single-out method"
      print "bayes\ttree\tmajority"
    - AUCs = orngStat.AUC(resVeh, orngStat.AUC.WeightedOneAgainstAll)
    + AUCs = Orange.evaluation.scoring.AUC(resVeh, Orange.evaluation.scoring.AUC.WeightedOneAgainstAll)
      print "%5.3f\t%5.3f\t%5.3f" % tuple(AUCs)

    …
      print " " *25 + "  \tbayes\ttree\tmajority"
      for i in range(4):
    -     AUCs = orngStat.AUC(resVeh, i)
    +     AUCs = Orange.evaluation.scoring.AUC(resVeh, i)
          print "%25s: \t%5.3f\t%5.3f\t%5.3f" % ((methods[i], ) + tuple(AUCs))


    - classes = vehicle.domain.classVar.values
    - classDist = orange.Distribution(vehicle.domain.classVar, vehicle)
    + classes = vehicle.domain.class_var.values
    + classDist = Orange.statistics.distribution.Distribution(vehicle.domain.class_var, vehicle)

      print
      print "AUC for detecting class 'van' in 'vehicle'"
    - AUCs = orngStat.AUC_single(resVeh, classIndex = vehicle.domain.classVar.values.index("van"))
    + AUCs = Orange.evaluation.scoring.AUC_single(resVeh, classIndex = vehicle.domain.class_var.values.index("van"))
      print "%5.3f\t%5.3f\t%5.3f" % tuple(AUCs)

    …
      print "AUCs for detecting various classes in 'vehicle'"
      for c,s in enumerate(classes):
    -     print "%s (%5.3f) vs others: \t%5.3f\t%5.3f\t%5.3f" % ((s, classDist[c] ) + tuple(orngStat.AUC_single(resVeh, c)))
    +     print "%s (%5.3f) vs others: \t%5.3f\t%5.3f\t%5.3f" % ((s, classDist[c] ) + tuple(Orange.evaluation.scoring.AUC_single(resVeh, c)))

      print
    - classes = vehicle.domain.classVar.values
    - AUCmatrix = orngStat.AUC_matrix(resVeh)[0]
    + classes = vehicle.domain.class_var.values
    + AUCmatrix = Orange.evaluation.scoring.AUC_matrix(resVeh)[0]
      print "\t"+"\t".join(classes[:-1])
      for className, AUCrow in zip(classes[1:], AUCmatrix[1:]):
    …
      for c1, s1 in enumerate(classes):
          for c2 in range(c1):
    -         print "%s vs %s: \t%5.3f\t%5.3f\t%5.3f" % ((s1, classes[c2]) + tuple(orngStat.AUC_pair(resVeh, c1, c2)))
    +         print "%s vs %s: \t%5.3f\t%5.3f\t%5.3f" % ((s1, classes[c2]) + tuple(Orange.evaluation.scoring.AUC_pair(resVeh, c1, c2)))


    - ri2 = orange.MakeRandomIndices2(voting, 0.6)
    + ri2 = Orange.data.sample.SubsetIndices2(voting, 0.6)
      train = voting.selectref(ri2, 0)
      test = voting.selectref(ri2, 1)
    - res1 = orngTest.learnAndTestOnTestData(learners, train, test)
    + res1 = Orange.evaluation.testing.learn_and_test_on_test_data(learners, train, test)

      print
      print "AUC and SE for voting"
    - AUCs = orngStat.AUCWilcoxon(res1)
    + AUCs = Orange.evaluation.scoring.AUCWilcoxon(res1)
      for li, lrn in enumerate(learners):
          print "%s: %5.3f+-%5.3f" % (lrn.name, AUCs[li][0], AUCs[li][1])

      print
    - print "Difference between naive Bayes and tree: %5.3f+-%5.3f" % tuple(orngStat.compare2AUCs(res1, 0, 1)[2])
    + print "Difference between naive Bayes and tree: %5.3f+-%5.3f" % tuple(Orange.evaluation.scoring.compare_2_AUCs(res1, 0, 1)[2])

      print
      print "ROC (first 20 points) for bayes on 'voting'"
    - ROC_bayes = orngStat.computeROC(res1)[0]
    + ROC_bayes = Orange.evaluation.scoring.compute_ROC(res1)[0]
      for t in ROC_bayes[:20]:
          print "%5.3f\t%5.3f" % t
  • docs/reference/rst/code/statExamplesGraphRanks.py

    r9372 → r9945
    - import orange, orngStat
    + import Orange

      names = ["first", "third", "second", "fourth" ]
      avranks =  [1.9, 3.2, 2.8, 3.3 ]
    - cd = orngStat.compute_CD(avranks, 30) #tested on 30 datasets
    - orngStat.graph_ranks("statExamples-graph_ranks1.png", avranks, names, \
    + cd = Orange.evaluation.scoring.compute_CD(avranks, 30) #tested on 30 datasets
    + Orange.evaluation.scoring.graph_ranks("statExamples-graph_ranks1.png", avranks, names, \
          cd=cd, width=6, textspace=1.5)
  • docs/reference/rst/code/transformvalue-d2c.py

    r9924 → r9945
      e1.getValueFrom = Orange.core.ClassifierFromVar(whichVar = data.domain["e"])
      e1.getValueFrom.transformer = Orange.core.Discrete2Continuous()
    - e1.getValueFrom.transformer.value = int(orange.Value(e, "1"))
    + e1.getValueFrom.transformer.value = int(Orange.data.Value(e, "1"))
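The hunk covers only the transformer setup; data, e, and e1 are defined earlier in the script. A sketch of the likely surrounding context, assuming the script loads the monks-1 data and derives a continuous feature e1 from the discrete feature e (names not shown in the hunk are guesses):

    import Orange

    data = Orange.data.Table("monks-1")      # assumed dataset
    e = data.domain["e"]                     # the discrete feature being transformed
    e1 = Orange.feature.Continuous("e=1")    # assumed name for the derived feature

    # Lines from the hunk: the transformer maps e to a continuous indicator,
    # with the value "1" of e selected through transformer.value.
    e1.getValueFrom = Orange.core.ClassifierFromVar(whichVar = data.domain["e"])
    e1.getValueFrom.transformer = Orange.core.Discrete2Continuous()
    e1.getValueFrom.transformer.value = int(Orange.data.Value(e, "1"))

    # Converting the data to a domain that contains e1 computes the new values on the fly.
    new_domain = Orange.data.Domain([e1, data.domain.class_var])
    print Orange.data.Table(new_domain, data)[0]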