Changeset summary — files changed: 7 added, 15 edited.

Legend: Unmodified | Added | Removed
  • Orange/testing/regression/results_reference/datatable_merge.py.txt

    r9954 r10009  
    1 Domain 1:  [a1, a2], {-2:m1, -3:m2} 
    2 Domain 2:  [a1, a3], {-2:m1, -4:m3} 
    3 Merged:    [a1, a2, a3], {-2:m1, -3:m2, -4:m3} 
     1Domain 1:  [a1, a2], {-3:m1, -4:m2} 
     2Domain 2:  [a1, a3], {-3:m1, -5:m3} 
     3Merged:    [a1, a2, a3], {-3:m1, -4:m2, -5:m3} 
    44 
    55   [1, 2], {"m1":3, "m2":4} 
  • Orange/testing/regression/results_reference/discretization.py.txt

    r10016 r10017  
    1616Cut-off points: <2.90000009537, 3.29999995232> 
    1717 
    18 Manual construction of IntervalDiscretizer - single attribute 
     18Manual construction of Interval discretizer - single attribute 
    1919[5.1, '>5.00', 'Iris-setosa'] 
    2020[4.9, '(3.00, 5.00]', 'Iris-setosa'] 
     
    2828[4.9, '(3.00, 5.00]', 'Iris-setosa'] 
    2929 
    30 Manual construction of IntervalDiscretizer - all attributes 
     30Manual construction of Interval discretizer - all attributes 
    3131['>5.00', '(3.00, 5.00]', '<=3.00', '<=3.00', 'Iris-setosa'] 
    3232['(3.00, 5.00]', '<=3.00', '<=3.00', '<=3.00', 'Iris-setosa'] 
     
    4141 
    4242 
    43 Equal interval size discretization 
     43Discretization with equal width intervals 
    4444D_sepal length: <<4.90, [4.90, 5.50), [5.50, 6.10), [6.10, 6.70), [6.70, 7.30), >7.30> 
    4545D_sepal width: <<2.40, [2.40, 2.80), [2.80, 3.20), [3.20, 3.60), [3.60, 4.00), >4.00> 
     
    5858 
    5959 
    60 Quartile discretization 
     60Quartile (equal frequency) discretization 
    6161D_sepal length: <<=4.95, (4.95, 5.35], (5.35, 5.75], (5.75, 6.25], (6.25, 6.65], >6.65> 
    6262D_sepal width: <<=2.65, (2.65, 2.85], (2.85, 3.05], (3.05, 3.25], (3.25, 3.45], >3.45> 
     
    7070 
    7171 
    72 Manual construction of EquiDistDiscretizer - all attributes 
     72Manual construction of EqualWidth - all attributes 
    7373['>5.00', '[3.00, 4.00)', '<2.00', '<2.00', 'Iris-setosa'] 
    7474['[4.00, 5.00)', '[3.00, 4.00)', '<2.00', '<2.00', 'Iris-setosa'] 
     
    8282['[4.00, 5.00)', '[3.00, 4.00)', '<2.00', '<2.00', 'Iris-setosa'] 
    8383 
    84 Fayyad-Irani discretization 
     84Fayyad-Irani entropy-based discretization 
    8585sepal length: <5.5, 6.09999990463> 
    8686sepal width: <2.90000009537, 3.29999995232> 
     
    8989 
    9090 
    91 Bi-Modal discretization on binary problem 
     91Bi-modal discretization on a binary problem 
    9292sepal length: <5.40000009537, 6.19999980927> 
    9393sepal width: <2.0, 2.90000009537> 
     
    9696 
    9797 
    98 Bi-Modal discretization on binary problem 
     98Bi-modal discretization on a binary problem 
    9999sepal length: (5.400, 6.200] 
    100100sepal width: (2.000, 2.900] 
     
    103103 
    104104 
    105 Entropy discretization on binary problem 
     105Entropy-based discretization on a binary problem 
    106106sepal length: <5.40000009537, 7.0> 
    107107sepal width: <2.90000009537> 
  • Orange/testing/regression/results_reference/instance-metavar.py.txt

    r9954 r10009  
    1 ['young', 'myope', 'no', 'reduced', 'none'], {-2:0.64} 
     1['young', 'myope', 'no', 'reduced', 'none'], {-3:0.64} 
  • Orange/testing/regression/results_reference/instance_merge.py.txt

    r9954 r10009  
    11First example:  [1, 2], {"m1":3, "m2":4} 
    22Second example:  [1, 2.5], {"m1":3, "m3":4.5} 
    3 Merge:  [1, 2.5, 3, ?], {"a2":2, "m2":4, -4:4.50, "n2":?} 
     3Merge:  [1, 2.5, 3, ?], {"a2":2, "m2":4, -5:4.50, "n2":?} 
  • Orange/testing/regression/results_reference/knnInstanceDistance.py.txt

    r9954 r10009  
    11*** Reference instance:  ['young', 'myope', 'no', 'reduced', 'none'] 
    2 ['young', 'myope', 'no', 'reduced', 'none'], {-2:0.00} 
    3 ['young', 'myope', 'no', 'normal', 'soft'], {-2:1.00} 
    4 ['young', 'myope', 'yes', 'reduced', 'none'], {-2:1.00} 
    5 ['pre-presbyopic', 'myope', 'no', 'reduced', 'none'], {-2:1.00} 
    6 ['young', 'hypermetrope', 'no', 'reduced', 'none'], {-2:1.00} 
     2['young', 'myope', 'no', 'reduced', 'none'], {-3:0.00} 
     3['young', 'myope', 'no', 'normal', 'soft'], {-3:1.00} 
     4['young', 'myope', 'yes', 'reduced', 'none'], {-3:1.00} 
     5['pre-presbyopic', 'myope', 'no', 'reduced', 'none'], {-3:1.00} 
     6['young', 'hypermetrope', 'no', 'reduced', 'none'], {-3:1.00} 
  • Orange/testing/regression/results_reference/lasso-example.py.txt

    r9954 r10009  
    1 Actual: 24.00, predicted: 26.54  
    2 Actual: 21.60, predicted: 23.85  
    3 Actual: 34.70, predicted: 26.35  
    4 Actual: 33.40, predicted: 25.73  
    5 Actual: 36.20, predicted: 25.55  
     1Actual: 24.00, predicted: 24.87  
     2Actual: 21.60, predicted: 23.56  
     3Actual: 34.70, predicted: 25.73  
     4Actual: 33.40, predicted: 25.34  
     5Actual: 36.20, predicted: 25.30  
    66  Variable  Coeff Est  Std Error          p 
    77 Intercept     22.533 
    8         RM      1.962      0.859      0.000   *** 
    9        AGE     -0.007      0.003      0.160       
    10    PTRATIO     -0.627      0.193      0.000   *** 
    11          B      0.002      0.002      0.240       
    12      LSTAT     -0.174      0.103      0.000   *** 
     8      CRIM     -0.003      0.022      0.530       
     9       NOX     -1.563      0.900      0.220       
     10        RM      1.928      0.871      0.000   *** 
     11       TAX     -0.000      0.001      0.450       
     12   PTRATIO     -0.220      0.189      0.060     . 
     13     LSTAT     -0.136      0.099      0.000   *** 
    1314Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1 
    1415 
    1516 
    16 For 8 variables the regression coefficient equals 0:  
    17 CRIM 
     17For 7 variables the regression coefficient equals 0:  
    1818ZN 
    1919INDUS 
    2020CHAS 
    21 NOX 
     21AGE 
    2222DIS 
    2323RAD 
    24 TAX 
     24B 
  • Orange/testing/regression/results_reference/svm-recursive-feature-elimination.py.txt

    r9954 r10009  
    1 [alpha 0, alpha 7, alpha 14, alpha 21, alpha 28, alpha 35, alpha 42, alpha 49, alpha 56, alpha 63, alpha 70, alpha 77, alpha 84, alpha 91, alpha 98, alpha 105, alpha 112, alpha 119, Elu 0, Elu 30, Elu 60, Elu 90, Elu 120, Elu 150, Elu 180, Elu 210, Elu 240, Elu 270, Elu 300, Elu 330, Elu 360, Elu 390, cdc15 10, cdc15 30, cdc15 50, cdc15 70, cdc15 90, cdc15 110, cdc15 130, cdc15 150, cdc15 170, cdc15 190, cdc15 210, cdc15 230, cdc15 250, cdc15 270, cdc15 290, spo 0, spo 2, spo 5, spo 7, spo 9, spo 11, spo5 2, spo5 7, spo5 11, spo- early, spo- mid, heat 0, heat 10, heat 20, heat 40, heat 80, heat 160, dtt 15, dtt 30, dtt 60, dtt 120, cold 0, cold 20, cold 40, cold 160, diau a, diau b, diau c, diau d, diau e, diau f, diau g, function], {-2:gene} 
    2 [Elu 120, cdc15 150, spo 5, spo- early, spo- mid, heat 10, cold 160, diau e, diau f, diau g, function], {-2:gene} 
     1[alpha 0, alpha 7, alpha 14, alpha 21, alpha 28, alpha 35, alpha 42, alpha 49, alpha 56, alpha 63, alpha 70, alpha 77, alpha 84, alpha 91, alpha 98, alpha 105, alpha 112, alpha 119, Elu 0, Elu 30, Elu 60, Elu 90, Elu 120, Elu 150, Elu 180, Elu 210, Elu 240, Elu 270, Elu 300, Elu 330, Elu 360, Elu 390, cdc15 10, cdc15 30, cdc15 50, cdc15 70, cdc15 90, cdc15 110, cdc15 130, cdc15 150, cdc15 170, cdc15 190, cdc15 210, cdc15 230, cdc15 250, cdc15 270, cdc15 290, spo 0, spo 2, spo 5, spo 7, spo 9, spo 11, spo5 2, spo5 7, spo5 11, spo- early, spo- mid, heat 0, heat 10, heat 20, heat 40, heat 80, heat 160, dtt 15, dtt 30, dtt 60, dtt 120, cold 0, cold 20, cold 40, cold 160, diau a, diau b, diau c, diau d, diau e, diau f, diau g, function], {-3:gene} 
     2[Elu 120, cdc15 150, spo 5, spo- early, spo- mid, heat 10, cold 160, diau e, diau f, diau g, function], {-3:gene} 
  • Orange/testing/regression/results_tests_20/reference_matrix.py.txt

    r9951 r10011  
    6262 [  0.00000000e+00   1.00000000e+00   1.00000000e+00 ...,   0.00000000e+00 
    6363    0.00000000e+00  -1.00000002e+30]] 
    64 /home/miha/work/orange/Orange/testing/regression/xtest_one.py:78: KernelWarning: attribute 'name' is of unsupported type 
    65   t__officialname = "%s/%s.%s.txt" % (t__outputsdir, t__name, t__sys.platform) 
    66 [1.0 0.0 0.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 2.0 0.0 0.0 1.0 --] 
  • Orange/testing/regression/tests_20/reference_matrix.py

    r9952 r10010  
    4646    except: 
    4747        print "Call '%s' failed" % meth.__name__ 
    48          
     48 
    4949 
    5050 
     
    6767        print t4.domain.attributes, t4.domain.classVar 
    6868        print t4[0] 
    69          
     69 
    7070        print 
    7171    except: 
     
    7373 
    7474zoo = orange.ExampleTable("../datasets/zoo") 
    75 zoo_s = orange.ExampleTable(orange.Domain(zoo.domain.attributes+zoo.domain.getmetas().values(), zoo.domain.classVar), zoo) 
     75zoo_s = orange.ExampleTable(orange.Domain(zoo.domain.attributes + zoo.domain.getmetas().values(), zoo.domain.classVar), zoo) 
    7676n = zoo_s.toNumpy() 
    7777print n[0] 
    78 n = zoo_s.toNumpyMA() 
    79 print n[0][0] 
  • docs/reference/rst/code/discretization-entropy.py

    r9943 r10012  
    99diff = old.difference(new) 
    1010print "Redundant features (%d of %d):" % (len(diff), len(data.domain.features)) 
    11 print ", ".join(x.name for x in diff) 
     11print ", ".join(sorted(x.name for x in diff)) 
  • docs/reference/rst/code/exclude-from-regression.txt

    r9824 r10012  
    55statistics-contingency6.py 
    66correspondence.py 
     7simple_tree_random_forest.py 
  • docs/reference/rst/code/hierarchical-example-2.py

    r9906 r10008  
    66distance = Orange.distance.Euclidean(iris) 
    77for i1, instance1 in enumerate(iris): 
    8     for i2 in range(i1+1, len(iris)): 
     8    for i2 in range(i1 + 1, len(iris)): 
    99        matrix[i1, i2] = distance(instance1, iris[i2]) 
    10          
     10 
    1111clustering = Orange.clustering.hierarchical.HierarchicalClustering() 
    1212clustering.linkage = clustering.Average 
    1313clustering.overwrite_matrix = 1 
    1414root = clustering(matrix) 
     15 
     16def prune(cluster, togo): 
     17    if cluster.branches: 
     18        if togo < 0: 
     19            cluster.branches = None 
     20        else: 
     21            for branch in cluster.branches: 
     22                prune(branch, togo - cluster.height) 
     23 
     24def listOfClusters0(cluster, alist): 
     25    if not cluster.branches: 
     26        alist.append(list(cluster)) 
     27    else: 
     28        for branch in cluster.branches: 
     29            listOfClusters0(branch, alist) 
     30 
     31def listOfClusters(root): 
     32    l = [] 
     33    listOfClusters0(root, l) 
     34    return l 
     35tables = [Orange.data.Table(cluster) for cluster in listOfClusters(root)] 
    1536 
    1637prune(root, 1.4) 
     
    2546        print "%s: %3.0f " % (iris.domain.class_var.values[e], d), 
    2647    print 
    27  
    28 tables = [Orange.data.Table(cluster) for cluster in listOfClusters(root)] 
  • docs/reference/rst/code/svm-linear-weights.py

    r9823 r10012  
    1 from Orange import data  
     1from Orange import data 
    22from Orange.classification import svm 
    33 
    44brown = data.Table("brown-selected") 
    5 classifier = svm.SVMLearner(brown,  
    6                             kernel_type=svm.kernels.Linear,  
     5classifier = svm.SVMLearner(brown, 
     6                            kernel_type=svm.kernels.Linear, 
    77                            normalization=False) 
    88 
    99weights = svm.get_linear_svm_weights(classifier) 
    10 print weights 
     10print sorted(weights) 
    1111 
    1212import pylab as plt 
    1313plt.hist(weights.values()) 
    14   
  • docs/reference/rst/code/transformvalue-d2c.py

    r9987 r10012  
    55 
    66e1 = Orange.feature.Continuous("e=1") 
    7 e1.getValueFrom = Orange.core.ClassifierFromVar(whichVar = data.domain["e"]) 
     7e1.getValueFrom = Orange.core.ClassifierFromVar(whichVar=data.domain["e"]) 
    88e1.getValueFrom.transformer = Orange.data.utils.Discrete2Continuous() 
    9 e1.getValueFrom.transformer.value = int(Orange.data.Value(e, "1")) 
     9 
  • docs/reference/rst/code/transformvalue-nc.py

    r9986 r10012  
    66newattrs = [] 
    77for attr in data.domain.features: 
    8     attr_c = Orange.feature.Continous(attr.name+"_n") 
    9     attr_c.getValueFrom = Orange.core.ClassifierFromVar(whichVar = attr) 
     8    attr_c = Orange.feature.Continuous(attr.name + "_n") 
     9    attr_c.getValueFrom = Orange.core.ClassifierFromVar(whichVar=attr) 
    1010    transformer = Orange.data.utils.NormalizeContinuous() 
    1111    attr_c.getValueFrom.transformer = transformer 
Note: See TracChangeset for help on using the changeset viewer.