Changeset 10304:a7e4c281c527 in orange


Timestamp: 02/18/12 15:58:17 (2 years ago)
Author: mstajdohar
Branch: default
Message: Trimmed some test results.
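The trimming applies one pattern across the edited scripts: instead of printing raw Python lists of floats (whose repr() carries 15-17 digits each), values are formatted to a fixed precision before printing, so the stored reference outputs stay short. A minimal sketch of the pattern, assuming values is a hypothetical list of floats produced by one of the scripts (Python 2, as in the repository; the precision varies per script between %.4f and %.10f):

    values = [0.9706992853681718, 0.3333333333333333]
    # list form, as in statExample0.py and testing-example.py
    print ["%.6f" % v for v in values]
    # joined form, as in modules_outlier1.py / outlier1.py
    print ", ".join("%.8f" % v for v in values)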

Files:
1 added
1 deleted
22 edited

  • Orange/testing/regression/results_reference/hierarchical-example-2.py.txt

    r10296 r10304  
    22 
    33 Cluster 0  
     4 
     5[4.5, 2.3, 1.3, 0.3, 'Iris-setosa'] 
     6 
     7 
     8 Cluster 1  
     9 
     10[6.3, 3.3, 6.0, 2.5, 'Iris-virginica'] 
     11[6.4, 3.2, 5.3, 2.3, 'Iris-virginica'] 
     12[6.3, 3.4, 5.6, 2.4, 'Iris-virginica'] 
     13[6.2, 3.4, 5.4, 2.3, 'Iris-virginica'] 
     14[6.8, 3.0, 5.5, 2.1, 'Iris-virginica'] 
     15[6.9, 3.1, 5.4, 2.1, 'Iris-virginica'] 
     16[6.9, 3.1, 5.1, 2.3, 'Iris-virginica'] 
     17[6.7, 3.0, 5.2, 2.3, 'Iris-virginica'] 
     18[6.9, 3.2, 5.7, 2.3, 'Iris-virginica'] 
     19[6.8, 3.2, 5.9, 2.3, 'Iris-virginica'] 
     20[6.7, 3.1, 5.6, 2.4, 'Iris-virginica'] 
     21[6.7, 3.3, 5.7, 2.5, 'Iris-virginica'] 
     22[6.7, 3.3, 5.7, 2.1, 'Iris-virginica'] 
     23[6.3, 2.9, 5.6, 1.8, 'Iris-virginica'] 
     24[6.5, 3.0, 5.5, 1.8, 'Iris-virginica'] 
     25[6.4, 3.1, 5.5, 1.8, 'Iris-virginica'] 
     26[6.5, 3.2, 5.1, 2.0, 'Iris-virginica'] 
     27[6.5, 3.0, 5.2, 2.0, 'Iris-virginica'] 
     28[6.5, 3.0, 5.8, 2.2, 'Iris-virginica'] 
     29[6.4, 2.8, 5.6, 2.1, 'Iris-virginica'] 
     30[6.4, 2.8, 5.6, 2.2, 'Iris-virginica'] 
     31[7.1, 3.0, 5.9, 2.1, 'Iris-virginica'] 
     32[7.3, 2.9, 6.3, 1.8, 'Iris-virginica'] 
     33[7.4, 2.8, 6.1, 1.9, 'Iris-virginica'] 
     34[7.2, 3.2, 6.0, 1.8, 'Iris-virginica'] 
     35[7.2, 3.0, 5.8, 1.6, 'Iris-virginica'] 
     36[7.6, 3.0, 6.6, 2.1, 'Iris-virginica'] 
     37[7.7, 2.8, 6.7, 2.0, 'Iris-virginica'] 
     38[7.7, 3.0, 6.1, 2.3, 'Iris-virginica'] 
     39[7.7, 2.6, 6.9, 2.3, 'Iris-virginica'] 
     40[7.2, 3.6, 6.1, 2.5, 'Iris-virginica'] 
     41[7.7, 3.8, 6.7, 2.2, 'Iris-virginica'] 
     42[7.9, 3.8, 6.4, 2.0, 'Iris-virginica'] 
     43 
     44 
     45 Cluster 2  
     46 
     47[5.1, 3.5, 1.4, 0.2, 'Iris-setosa'] 
     48[5.2, 3.5, 1.5, 0.2, 'Iris-setosa'] 
     49[5.0, 3.4, 1.5, 0.2, 'Iris-setosa'] 
     50[5.1, 3.4, 1.5, 0.2, 'Iris-setosa'] 
     51[5.2, 3.4, 1.4, 0.2, 'Iris-setosa'] 
     52[5.0, 3.6, 1.4, 0.2, 'Iris-setosa'] 
     53[5.1, 3.5, 1.4, 0.3, 'Iris-setosa'] 
     54[5.0, 3.5, 1.3, 0.3, 'Iris-setosa'] 
     55[5.0, 3.2, 1.2, 0.2, 'Iris-setosa'] 
     56[5.0, 3.3, 1.4, 0.2, 'Iris-setosa'] 
     57[4.6, 3.4, 1.4, 0.3, 'Iris-setosa'] 
     58[4.8, 3.4, 1.6, 0.2, 'Iris-setosa'] 
     59[4.8, 3.4, 1.9, 0.2, 'Iris-setosa'] 
     60[5.1, 3.3, 1.7, 0.5, 'Iris-setosa'] 
     61[5.0, 3.4, 1.6, 0.4, 'Iris-setosa'] 
     62[5.0, 3.5, 1.6, 0.6, 'Iris-setosa'] 
     63[5.4, 3.4, 1.7, 0.2, 'Iris-setosa'] 
     64[5.5, 3.5, 1.3, 0.2, 'Iris-setosa'] 
     65[5.4, 3.4, 1.5, 0.4, 'Iris-setosa'] 
     66[4.6, 3.6, 1.0, 0.2, 'Iris-setosa'] 
     67[5.4, 3.9, 1.7, 0.4, 'Iris-setosa'] 
     68[5.4, 3.9, 1.3, 0.4, 'Iris-setosa'] 
     69[5.7, 3.8, 1.7, 0.3, 'Iris-setosa'] 
     70[5.4, 3.7, 1.5, 0.2, 'Iris-setosa'] 
     71[5.3, 3.7, 1.5, 0.2, 'Iris-setosa'] 
     72[5.1, 3.8, 1.5, 0.3, 'Iris-setosa'] 
     73[5.1, 3.8, 1.6, 0.2, 'Iris-setosa'] 
     74[5.1, 3.7, 1.5, 0.4, 'Iris-setosa'] 
     75[5.1, 3.8, 1.9, 0.4, 'Iris-setosa'] 
     76[4.9, 3.0, 1.4, 0.2, 'Iris-setosa'] 
     77[5.0, 3.0, 1.6, 0.2, 'Iris-setosa'] 
     78[4.8, 3.0, 1.4, 0.3, 'Iris-setosa'] 
     79[4.9, 3.1, 1.5, 0.1, 'Iris-setosa'] 
     80[4.9, 3.1, 1.5, 0.1, 'Iris-setosa'] 
     81[4.9, 3.1, 1.5, 0.1, 'Iris-setosa'] 
     82[4.8, 3.0, 1.4, 0.1, 'Iris-setosa'] 
     83[4.8, 3.1, 1.6, 0.2, 'Iris-setosa'] 
     84[4.7, 3.2, 1.3, 0.2, 'Iris-setosa'] 
     85[4.6, 3.2, 1.4, 0.2, 'Iris-setosa'] 
     86[4.7, 3.2, 1.6, 0.2, 'Iris-setosa'] 
     87[4.6, 3.1, 1.5, 0.2, 'Iris-setosa'] 
     88[4.4, 3.2, 1.3, 0.2, 'Iris-setosa'] 
     89[4.4, 2.9, 1.4, 0.2, 'Iris-setosa'] 
     90[4.4, 3.0, 1.3, 0.2, 'Iris-setosa'] 
     91[4.3, 3.0, 1.1, 0.1, 'Iris-setosa'] 
     92[5.8, 4.0, 1.2, 0.2, 'Iris-setosa'] 
     93[5.2, 4.1, 1.5, 0.1, 'Iris-setosa'] 
     94[5.5, 4.2, 1.4, 0.2, 'Iris-setosa'] 
     95[5.7, 4.4, 1.5, 0.4, 'Iris-setosa'] 
     96 
     97 
     98 Cluster 3  
    499 
    5100[7.0, 3.2, 4.7, 1.4, 'Iris-versicolor'] 
     
    70165[5.0, 2.0, 3.5, 1.0, 'Iris-versicolor'] 
    71166[4.9, 2.5, 4.5, 1.7, 'Iris-virginica'] 
    72  
    73  
    74  Cluster 1  
    75  
    76 [5.1, 3.5, 1.4, 0.2, 'Iris-setosa'] 
    77 [5.2, 3.5, 1.5, 0.2, 'Iris-setosa'] 
    78 [5.0, 3.4, 1.5, 0.2, 'Iris-setosa'] 
    79 [5.1, 3.4, 1.5, 0.2, 'Iris-setosa'] 
    80 [5.2, 3.4, 1.4, 0.2, 'Iris-setosa'] 
    81 [5.0, 3.6, 1.4, 0.2, 'Iris-setosa'] 
    82 [5.1, 3.5, 1.4, 0.3, 'Iris-setosa'] 
    83 [5.0, 3.5, 1.3, 0.3, 'Iris-setosa'] 
    84 [5.0, 3.2, 1.2, 0.2, 'Iris-setosa'] 
    85 [5.0, 3.3, 1.4, 0.2, 'Iris-setosa'] 
    86 [4.6, 3.4, 1.4, 0.3, 'Iris-setosa'] 
    87 [4.8, 3.4, 1.6, 0.2, 'Iris-setosa'] 
    88 [4.8, 3.4, 1.9, 0.2, 'Iris-setosa'] 
    89 [5.1, 3.3, 1.7, 0.5, 'Iris-setosa'] 
    90 [5.0, 3.4, 1.6, 0.4, 'Iris-setosa'] 
    91 [5.0, 3.5, 1.6, 0.6, 'Iris-setosa'] 
    92 [5.4, 3.4, 1.7, 0.2, 'Iris-setosa'] 
    93 [5.5, 3.5, 1.3, 0.2, 'Iris-setosa'] 
    94 [5.4, 3.4, 1.5, 0.4, 'Iris-setosa'] 
    95 [4.6, 3.6, 1.0, 0.2, 'Iris-setosa'] 
    96 [5.4, 3.9, 1.7, 0.4, 'Iris-setosa'] 
    97 [5.4, 3.9, 1.3, 0.4, 'Iris-setosa'] 
    98 [5.7, 3.8, 1.7, 0.3, 'Iris-setosa'] 
    99 [5.4, 3.7, 1.5, 0.2, 'Iris-setosa'] 
    100 [5.3, 3.7, 1.5, 0.2, 'Iris-setosa'] 
    101 [5.1, 3.8, 1.5, 0.3, 'Iris-setosa'] 
    102 [5.1, 3.8, 1.6, 0.2, 'Iris-setosa'] 
    103 [5.1, 3.7, 1.5, 0.4, 'Iris-setosa'] 
    104 [5.1, 3.8, 1.9, 0.4, 'Iris-setosa'] 
    105 [4.9, 3.0, 1.4, 0.2, 'Iris-setosa'] 
    106 [5.0, 3.0, 1.6, 0.2, 'Iris-setosa'] 
    107 [4.8, 3.0, 1.4, 0.3, 'Iris-setosa'] 
    108 [4.9, 3.1, 1.5, 0.1, 'Iris-setosa'] 
    109 [4.9, 3.1, 1.5, 0.1, 'Iris-setosa'] 
    110 [4.9, 3.1, 1.5, 0.1, 'Iris-setosa'] 
    111 [4.8, 3.0, 1.4, 0.1, 'Iris-setosa'] 
    112 [4.8, 3.1, 1.6, 0.2, 'Iris-setosa'] 
    113 [4.7, 3.2, 1.3, 0.2, 'Iris-setosa'] 
    114 [4.6, 3.2, 1.4, 0.2, 'Iris-setosa'] 
    115 [4.7, 3.2, 1.6, 0.2, 'Iris-setosa'] 
    116 [4.6, 3.1, 1.5, 0.2, 'Iris-setosa'] 
    117 [4.4, 3.2, 1.3, 0.2, 'Iris-setosa'] 
    118 [4.4, 2.9, 1.4, 0.2, 'Iris-setosa'] 
    119 [4.4, 3.0, 1.3, 0.2, 'Iris-setosa'] 
    120 [4.3, 3.0, 1.1, 0.1, 'Iris-setosa'] 
    121 [5.8, 4.0, 1.2, 0.2, 'Iris-setosa'] 
    122 [5.2, 4.1, 1.5, 0.1, 'Iris-setosa'] 
    123 [5.5, 4.2, 1.4, 0.2, 'Iris-setosa'] 
    124 [5.7, 4.4, 1.5, 0.4, 'Iris-setosa'] 
    125  
    126  
    127  Cluster 2  
    128  
    129 [4.5, 2.3, 1.3, 0.3, 'Iris-setosa'] 
    130  
    131  
    132  Cluster 3  
    133  
    134 [6.3, 3.3, 6.0, 2.5, 'Iris-virginica'] 
    135 [6.4, 3.2, 5.3, 2.3, 'Iris-virginica'] 
    136 [6.3, 3.4, 5.6, 2.4, 'Iris-virginica'] 
    137 [6.2, 3.4, 5.4, 2.3, 'Iris-virginica'] 
    138 [6.8, 3.0, 5.5, 2.1, 'Iris-virginica'] 
    139 [6.9, 3.1, 5.4, 2.1, 'Iris-virginica'] 
    140 [6.9, 3.1, 5.1, 2.3, 'Iris-virginica'] 
    141 [6.7, 3.0, 5.2, 2.3, 'Iris-virginica'] 
    142 [6.9, 3.2, 5.7, 2.3, 'Iris-virginica'] 
    143 [6.8, 3.2, 5.9, 2.3, 'Iris-virginica'] 
    144 [6.7, 3.1, 5.6, 2.4, 'Iris-virginica'] 
    145 [6.7, 3.3, 5.7, 2.5, 'Iris-virginica'] 
    146 [6.7, 3.3, 5.7, 2.1, 'Iris-virginica'] 
    147 [6.3, 2.9, 5.6, 1.8, 'Iris-virginica'] 
    148 [6.5, 3.0, 5.5, 1.8, 'Iris-virginica'] 
    149 [6.4, 3.1, 5.5, 1.8, 'Iris-virginica'] 
    150 [6.5, 3.2, 5.1, 2.0, 'Iris-virginica'] 
    151 [6.5, 3.0, 5.2, 2.0, 'Iris-virginica'] 
    152 [6.5, 3.0, 5.8, 2.2, 'Iris-virginica'] 
    153 [6.4, 2.8, 5.6, 2.1, 'Iris-virginica'] 
    154 [6.4, 2.8, 5.6, 2.2, 'Iris-virginica'] 
    155 [7.1, 3.0, 5.9, 2.1, 'Iris-virginica'] 
    156 [7.3, 2.9, 6.3, 1.8, 'Iris-virginica'] 
    157 [7.4, 2.8, 6.1, 1.9, 'Iris-virginica'] 
    158 [7.2, 3.2, 6.0, 1.8, 'Iris-virginica'] 
    159 [7.2, 3.0, 5.8, 1.6, 'Iris-virginica'] 
    160 [7.6, 3.0, 6.6, 2.1, 'Iris-virginica'] 
    161 [7.7, 2.8, 6.7, 2.0, 'Iris-virginica'] 
    162 [7.7, 3.0, 6.1, 2.3, 'Iris-virginica'] 
    163 [7.7, 2.6, 6.9, 2.3, 'Iris-virginica'] 
    164 [7.2, 3.6, 6.1, 2.5, 'Iris-virginica'] 
    165 [7.7, 3.8, 6.7, 2.2, 'Iris-virginica'] 
    166 [7.9, 3.8, 6.4, 2.0, 'Iris-virginica'] 
    167 Iris-setosa:   0  Iris-versicolor:  50  Iris-virginica:  17  
    168 Iris-setosa:  49  Iris-versicolor:   0  Iris-virginica:   0  
    169167Iris-setosa:   1  Iris-versicolor:   0  Iris-virginica:   0  
    170168Iris-setosa:   0  Iris-versicolor:   0  Iris-virginica:  33  
     169Iris-setosa:  49  Iris-versicolor:   0  Iris-virginica:   0  
     170Iris-setosa:   0  Iris-versicolor:  50  Iris-virginica:  17  
  • Orange/testing/regression/results_reference/optimization-tuning1.py.txt

    r10093 r10304  
    1 *** optimization  1: [0.9706992853681718]: 
    2 *** optimization  2: [0.9743207136103917]: 
    3 *** optimization  3: [0.9833880175537593]: 
    4 *** optimization  4: [0.987881777448925]: 
    5 *** optimization  5: [0.9889423897378024]: 
    6 *** optimization  10: [0.9869203192362646]: 
    7 *** optimization  15: [0.9884223913820658]: 
    8 *** optimization  20: [0.9780406731017164]: 
     1*** optimization  1: 0.97069929: 
     2*** optimization  2: 0.97432071: 
     3*** optimization  3: 0.98338802: 
     4*** optimization  4: 0.98788178: 
     5*** optimization  5: 0.98894239: 
     6*** optimization  10: 0.98692032: 
     7*** optimization  15: 0.98842239: 
     8*** optimization  20: 0.97804067: 
    99*** Optimal parameter: min_subset = 5 
    1010Optimal setting:  5 
    11 *** optimization  1: [0.9832190860215053]: 
    12 *** optimization  2: [0.9781989247311826]: 
    13 *** optimization  3: [0.9912679211469535]: 
    14 *** optimization  4: [0.9937656810035842]: 
    15 *** optimization  5: [0.9907504480286738]: 
    16 *** optimization  10: [0.9872647849462366]: 
    17 *** optimization  15: [0.9897692652329749]: 
    18 *** optimization  20: [0.9910506272401434]: 
     11*** optimization  1: 0.98321909: 
     12*** optimization  2: 0.97819892: 
     13*** optimization  3: 0.99126792: 
     14*** optimization  4: 0.99376568: 
     15*** optimization  5: 0.99075045: 
     16*** optimization  10: 0.98726478: 
     17*** optimization  15: 0.98976927: 
     18*** optimization  20: 0.99105063: 
    1919*** Optimal parameter: min_subset = 4 
    20 *** optimization  1: [0.9729637096774194]: 
    21 *** optimization  2: [0.9727867383512546]: 
    22 *** optimization  3: [0.9808624551971327]: 
    23 *** optimization  4: [0.9820990143369175]: 
    24 *** optimization  5: [0.9854368279569893]: 
    25 *** optimization  10: [0.9885685483870967]: 
    26 *** optimization  15: [0.9916263440860216]: 
    27 *** optimization  20: [0.986008064516129]: 
     20*** optimization  1: 0.97296371: 
     21*** optimization  2: 0.97278674: 
     22*** optimization  3: 0.98086246: 
     23*** optimization  4: 0.98209901: 
     24*** optimization  5: 0.98543683: 
     25*** optimization  10: 0.98856855: 
     26*** optimization  15: 0.99162634: 
     27*** optimization  20: 0.98600806: 
    2828*** Optimal parameter: min_subset = 15 
    29 *** optimization  1: [0.9802307347670249]: 
    30 *** optimization  2: [0.9830689964157706]: 
    31 *** optimization  3: [0.9824529569892473]: 
    32 *** optimization  4: [0.9896012544802868]: 
    33 *** optimization  5: [0.9847267025089605]: 
    34 *** optimization  10: [0.9896527777777778]: 
    35 *** optimization  15: [0.9874350358422939]: 
    36 *** optimization  20: [0.9743794802867383]: 
     29*** optimization  1: 0.98023073: 
     30*** optimization  2: 0.98306900: 
     31*** optimization  3: 0.98245296: 
     32*** optimization  4: 0.98960125: 
     33*** optimization  5: 0.98472670: 
     34*** optimization  10: 0.98965278: 
     35*** optimization  15: 0.98743504: 
     36*** optimization  20: 0.97437948: 
    3737*** Optimal parameter: min_subset = 10 
    38 *** optimization  1: [0.9682504480286739]: 
    39 *** optimization  2: [0.9763328853046596]: 
    40 *** optimization  3: [0.974834229390681]: 
    41 *** optimization  4: [0.980266577060932]: 
    42 *** optimization  5: [0.9795676523297492]: 
    43 *** optimization  10: [0.9769332437275986]: 
    44 *** optimization  15: [0.9773454301075268]: 
    45 *** optimization  20: [0.9740815412186381]: 
     38*** optimization  1: 0.96825045: 
     39*** optimization  2: 0.97633289: 
     40*** optimization  3: 0.97483423: 
     41*** optimization  4: 0.98026658: 
     42*** optimization  5: 0.97956765: 
     43*** optimization  10: 0.97693324: 
     44*** optimization  15: 0.97734543: 
     45*** optimization  20: 0.97408154: 
    4646*** Optimal parameter: min_subset = 4 
    47 *** optimization  1: [0.9640591397849462]: 
    48 *** optimization  2: [0.9741397849462365]: 
    49 *** optimization  3: [0.9783467741935483]: 
    50 *** optimization  4: [0.9872110215053763]: 
    51 *** optimization  5: [0.9868839605734767]: 
    52 *** optimization  10: [0.987806899641577]: 
    53 *** optimization  15: [0.9802083333333333]: 
    54 *** optimization  20: [0.9767137096774194]: 
     47*** optimization  1: 0.96405914: 
     48*** optimization  2: 0.97413978: 
     49*** optimization  3: 0.97834677: 
     50*** optimization  4: 0.98721102: 
     51*** optimization  5: 0.98688396: 
     52*** optimization  10: 0.98780690: 
     53*** optimization  15: 0.98020833: 
     54*** optimization  20: 0.97671371: 
    5555*** Optimal parameter: min_subset = 10 
    56 *** optimization  1: [0.9743570788530466]: 
    57 *** optimization  2: [0.9743370547143588]: 
    58 *** optimization  3: [0.977821071977178]: 
    59 *** optimization  4: [0.9755929522346574]: 
    60 *** optimization  5: [0.9834564497842148]: 
    61 *** optimization  10: [0.984518277741204]: 
    62 *** optimization  15: [0.9816068228366615]: 
    63 *** optimization  20: [0.9802781892326824]: 
     56*** optimization  1: 0.97435708: 
     57*** optimization  2: 0.97433705: 
     58*** optimization  3: 0.97782107: 
     59*** optimization  4: 0.97559295: 
     60*** optimization  5: 0.98345645: 
     61*** optimization  10: 0.98451828: 
     62*** optimization  15: 0.98160682: 
     63*** optimization  20: 0.98027819: 
    6464*** Optimal parameter: min_subset = 10 
    65 *** optimization  1: [0.9764701740911419]: 
    66 *** optimization  2: [0.9851986870016823]: 
    67 *** optimization  3: [0.9877503017335967]: 
    68 *** optimization  4: [0.9894257461048936]: 
    69 *** optimization  5: [0.9890922847633676]: 
    70 *** optimization  10: [0.9826276881720429]: 
    71 *** optimization  15: [0.9815133768561187]: 
    72 *** optimization  20: [0.9825131208397337]: 
     65*** optimization  1: 0.97647017: 
     66*** optimization  2: 0.98519869: 
     67*** optimization  3: 0.98775030: 
     68*** optimization  4: 0.98942575: 
     69*** optimization  5: 0.98909228: 
     70*** optimization  10: 0.98262769: 
     71*** optimization  15: 0.98151338: 
     72*** optimization  20: 0.98251312: 
    7373*** Optimal parameter: min_subset = 4 
    74 *** optimization  1: [0.980901680564699]: 
    75 *** optimization  2: [0.9916655694535879]: 
    76 *** optimization  3: [0.9883444426157559]: 
    77 *** optimization  4: [0.9884388029405311]: 
    78 *** optimization  5: [0.9908789591105258]: 
    79 *** optimization  10: [0.9896376910979445]: 
    80 *** optimization  15: [0.9890296979006656]: 
    81 *** optimization  20: [0.9841350760734402]: 
     74*** optimization  1: 0.98090168: 
     75*** optimization  2: 0.99166557: 
     76*** optimization  3: 0.98834444: 
     77*** optimization  4: 0.98843880: 
     78*** optimization  5: 0.99087896: 
     79*** optimization  10: 0.98963769: 
     80*** optimization  15: 0.98902970: 
     81*** optimization  20: 0.98413508: 
    8282*** Optimal parameter: min_subset = 2 
    83 *** optimization  1: [0.971771953405018]: 
    84 *** optimization  2: [0.9794041218637994]: 
    85 *** optimization  3: [0.9802464157706093]: 
    86 *** optimization  4: [0.9802710573476703]: 
    87 *** optimization  5: [0.9831070788530465]: 
    88 *** optimization  10: [0.9841397849462366]: 
    89 *** optimization  15: [0.9865120967741937]: 
    90 *** optimization  20: [0.9799551971326166]: 
     83*** optimization  1: 0.97177195: 
     84*** optimization  2: 0.97940412: 
     85*** optimization  3: 0.98024642: 
     86*** optimization  4: 0.98027106: 
     87*** optimization  5: 0.98310708: 
     88*** optimization  10: 0.98413978: 
     89*** optimization  15: 0.98651210: 
     90*** optimization  20: 0.97995520: 
    9191*** Optimal parameter: min_subset = 15 
    92 *** optimization  1: [0.9708422939068101]: 
    93 *** optimization  2: [0.9755510752688172]: 
    94 *** optimization  3: [0.9838418458781363]: 
    95 *** optimization  4: [0.9772222222222224]: 
    96 *** optimization  5: [0.984285394265233]: 
    97 *** optimization  10: [0.989247311827957]: 
    98 *** optimization  15: [0.987780017921147]: 
    99 *** optimization  20: [0.980741487455197]: 
     92*** optimization  1: 0.97084229: 
     93*** optimization  2: 0.97555108: 
     94*** optimization  3: 0.98384185: 
     95*** optimization  4: 0.97722222: 
     96*** optimization  5: 0.98428539: 
     97*** optimization  10: 0.98924731: 
     98*** optimization  15: 0.98778002: 
     99*** optimization  20: 0.98074149: 
    100100*** Optimal parameter: min_subset = 10 
    101101Untuned tree: 0.926 
    102102Tuned tree: 0.983 
    103 *** optimization  1: [0.9706992853681718]: 
    104 *** optimization  2: [0.9743207136103917]: 
    105 *** optimization  3: [0.9833880175537593]: 
    106 *** optimization  4: [0.987881777448925]: 
    107 *** optimization  5: [0.9889423897378024]: 
    108 *** optimization  10: [0.9869203192362646]: 
    109 *** optimization  15: [0.9884223913820658]: 
    110 *** optimization  20: [0.9780406731017164]: 
     103*** optimization  1: 0.97069929: 
     104*** optimization  2: 0.97432071: 
     105*** optimization  3: 0.98338802: 
     106*** optimization  4: 0.98788178: 
     107*** optimization  5: 0.98894239: 
     108*** optimization  10: 0.98692032: 
     109*** optimization  15: 0.98842239: 
     110*** optimization  20: 0.97804067: 
    111111*** Optimal parameter: ['split.continuous_split_constructor.min_subset', 'split.discrete_split_constructor.min_subset'] = 5 
    112112Optimal setting:  5.0 
  • Orange/testing/regression/results_reference/outlier1.py.txt

    r9954 r10304  
    1 [1.9537338562966178, 1.4916207549367024, 2.356702725276853, 1.3547757144824362, 1.3165608011293919, 0.7709043563454572, 2.503729351060475, 1.5073157621637685, 3.136062372082914, 0.6907807913801491, 2.528576427933252, 0.7433781485122628, 1.0879648893391614, 1.5809701088397101, 0.9555196701641486, 0.6410992893842741, 1.3610443564385841, 0.5984280534384303, 0.5764828338874588, 0.6589666024797053, -0.11532119366264716, 0.5946079506443136, 0.2164048799939679, -0.13532808531244495, -0.7735678349429188, -0.7570563574669107, -0.8610439121252985, -1.216805395814878, -0.8496508795832981, -0.5012338746422235, -0.773418158799313, 0.16767396953609892, -0.35117712691223024, 0.48721479093189457, -1.7288308112675763, -0.4117766961169645, -0.788776025114878, -1.3872717005506068, 2.106596327546038, -1.2871466419288893, -0.41878699142409, -1.240546948076198, -0.24337773166773838, -0.9537583510329528, 0.48201088266530023, -0.563332572174737, -0.7443069891450993, -1.764166660649225, -1.7065434213509496, -0.5050438469709504, -1.3054185775296043, -0.8068132489127746, -0.6382812011694322, -1.1627861693976371, -0.45401443972098127, -0.6137196147779603, -0.4977304132654839, -0.028586481599992053, -0.3072906036350176, -0.8829586176959779, -1.444927239489506, -0.0966882635820835, -0.5922128607740018, 0.6084011487364829, -0.6517359795265918, -0.2999219699247945, -1.2488097930212723, 1.199403408838386, -1.1695890709679937, 0.436954182465409, -1.1042173101212782, -0.6650977784109602, -0.5139643196272795, -0.5198433080158704, 0.0043002748331546275, -0.5518119098417992, 1.3441195820509282, -0.712948924486379, -0.018440170740114344, 0.36568005901789147, -0.5072588384978175, -1.1650625423904195, -0.38855801862147704, 0.30554976386664046, -0.6907406016864835, 1.5280992689860888, -0.10141847608741561, 0.47051948353017276, -0.5387045991248854, 0.978513599005968, -0.40786746694014064, 0.4800826365630705, 0.2240552203634309, -0.9595995086522562, -0.8351580826251421, 0.4753320019864212, -0.57881353707992, 1.0640441766510356, -0.5847533481883163, -0.36677168526134823, -0.11674188951656596, 0.5851323848134786, 0.5079904777408251, -0.41123866164618883, 0.7823550287478366, 0.036505126098621624, 0.5777693152445059, 0.1748309228565252] 
     11.95373386, 1.49162075, 2.35670273, 1.35477571, 1.31656080, 0.77090436, 2.50372935, 1.50731576, 3.13606237, 0.69078079, 2.52857643, 0.74337815, 1.08796489, 1.58097011, 0.95551967, 0.64109929, 1.36104436, 0.59842805, 0.57648283, 0.65896660, -0.11532119, 0.59460795, 0.21640488, -0.13532809, -0.77356783, -0.75705636, -0.86104391, -1.21680540, -0.84965088, -0.50123387, -0.77341816, 0.16767397, -0.35117713, 0.48721479, -1.72883081, -0.41177670, -0.78877603, -1.38727170, 2.10659633, -1.28714664, -0.41878699, -1.24054695, -0.24337773, -0.95375835, 0.48201088, -0.56333257, -0.74430699, -1.76416666, -1.70654342, -0.50504385, -1.30541858, -0.80681325, -0.63828120, -1.16278617, -0.45401444, -0.61371961, -0.49773041, -0.02858648, -0.30729060, -0.88295862, -1.44492724, -0.09668826, -0.59221286, 0.60840115, -0.65173598, -0.29992197, -1.24880979, 1.19940341, -1.16958907, 0.43695418, -1.10421731, -0.66509778, -0.51396432, -0.51984331, 0.00430027, -0.55181191, 1.34411958, -0.71294892, -0.01844017, 0.36568006, -0.50725884, -1.16506254, -0.38855802, 0.30554976, -0.69074060, 1.52809927, -0.10141848, 0.47051948, -0.53870460, 0.97851360, -0.40786747, 0.48008264, 0.22405522, -0.95959951, -0.83515808, 0.47533200, -0.57881354, 1.06404418, -0.58475335, -0.36677169, -0.11674189, 0.58513238, 0.50799048, -0.41123866, 0.78235503, 0.03650513, 0.57776932, 0.17483092 
  • Orange/testing/regression/results_reference/statExample0.py.txt

    r10006 r10304  
    1 CA = [0.90338266384778, 0.8252114164904862, 0.6137949260042285] 
    2 AUC =  [0.9738530668677726, 0.9259908664320429, 0.49999999999999994] 
     1CA = ['0.903383', '0.825211', '0.613795'] 
     2AUC =  ['0.973853', '0.925991', '0.500000'] 
  • Orange/testing/regression/results_reference/svm-linear-weights.py.txt

    r10118 r10304  
    1 [0.030539018557891578, 0.04645238275160125, 0.06557659077448137, 0.11428450056762224, 0.11474308886955724, 0.12396520046361383, 0.13024372486415015, 0.1308234316119738, 0.13641650791763626, 0.13741762346585432, 0.13876265882583333, 0.14088060621674625, 0.14935761521655416, 0.15183429673463036, 0.15913983311663107, 0.16085715739160683, 0.16577258493775038, 0.16956982427965123, 0.1699365258012951, 0.1820761083768519, 0.18270335600911874, 0.18310901108005986, 0.18433878873311124, 0.1905674816738207, 0.19091815990337407, 0.19198054903386352, 0.19329749923741518, 0.20088381949723239, 0.20615575833508282, 0.2093390824178926, 0.21379911334384863, 0.21754881357923167, 0.21951931922594184, 0.23473821977067888, 0.24965577080121784, 0.24968263583989325, 0.2498282401589412, 0.26459268873021585, 0.26780459416067937, 0.27980454530046744, 0.3092287272528724, 0.3192185000510415, 0.32729758144823035, 0.33471969574325466, 0.3367416107117914, 0.3573777070361102, 0.3658301477295572, 0.36698713537474476, 0.3834942300173535, 0.38909905042009185, 0.40417556232809326, 0.4097605043285248, 0.42425268429856355, 0.44789694844623534, 0.44932601596409105, 0.4580618143377812, 0.4786304184632838, 0.49451797411099035, 0.5474715182870784, 0.55305024494988, 0.564756618474929, 0.5838556086306895, 0.5939764872379445, 0.5951914850021424, 0.5965599387054419, 0.693249161777514, 0.6947037871090163, 0.7078062232168753, 0.8081568079276176, 0.8466167037587657, 0.864767371223623, 0.9867456006798212, 1.000320207469925, 1.0683498605674933, 1.200079459496442, 1.4452997087693935, 1.9466556509082333, 2.248793727194904, 3.2086605964825132] 
     1['0.0305390186', '0.0464523828', '0.0655765908', '0.1142845006', '0.1147430889', '0.1239652005', '0.1302437249', '0.1308234316', '0.1364165079', '0.1374176235', '0.1387626588', '0.1408806062', '0.1493576152', '0.1518342967', '0.1591398331', '0.1608571574', '0.1657725849', '0.1695698243', '0.1699365258', '0.1820761084', '0.1827033560', '0.1831090111', '0.1843387887', '0.1905674817', '0.1909181599', '0.1919805490', '0.1932974992', '0.2008838195', '0.2061557583', '0.2093390824', '0.2137991133', '0.2175488136', '0.2195193192', '0.2347382198', '0.2496557708', '0.2496826358', '0.2498282402', '0.2645926887', '0.2678045942', '0.2798045453', '0.3092287273', '0.3192185001', '0.3272975814', '0.3347196957', '0.3367416107', '0.3573777070', '0.3658301477', '0.3669871354', '0.3834942300', '0.3890990504', '0.4041755623', '0.4097605043', '0.4242526843', '0.4478969484', '0.4493260160', '0.4580618143', '0.4786304185', '0.4945179741', '0.5474715183', '0.5530502449', '0.5647566185', '0.5838556086', '0.5939764872', '0.5951914850', '0.5965599387', '0.6932491618', '0.6947037871', '0.7078062232', '0.8081568079', '0.8466167038', '0.8647673712', '0.9867456007', '1.0003202075', '1.0683498606', '1.2000794595', '1.4452997088', '1.9466556509', '2.2487937272', '3.2086605965'] 
  • Orange/testing/regression/results_reference/testing-example.py.txt

    r10290 r10304  
    1 [0.9200000000000002, 0.3333333333333333] 
     1['0.9200', '0.3333'] 
  • Orange/testing/regression/results_tests_20/modules_outlier1.py.txt

    r9951 r10304  
    1 [1.9537338562966178, 1.4916207549367024, 2.356702725276853, 1.3547757144824362, 1.3165608011293919, 0.7709043563454572, 2.503729351060475, 1.5073157621637685, 3.136062372082914, 0.6907807913801491, 2.528576427933252, 0.7433781485122628, 1.0879648893391614, 1.5809701088397101, 0.9555196701641486, 0.6410992893842741, 1.3610443564385841, 0.5984280534384303, 0.5764828338874588, 0.6589666024797053, -0.11532119366264716, 0.5946079506443136, 0.2164048799939679, -0.13532808531244495, -0.7735678349429188, -0.7570563574669107, -0.8610439121252985, -1.216805395814878, -0.8496508795832981, -0.5012338746422235, -0.773418158799313, 0.16767396953609892, -0.35117712691223024, 0.48721479093189457, -1.7288308112675763, -0.4117766961169645, -0.788776025114878, -1.3872717005506068, 2.106596327546038, -1.2871466419288893, -0.41878699142409, -1.240546948076198, -0.24337773166773838, -0.9537583510329528, 0.48201088266530023, -0.563332572174737, -0.7443069891450993, -1.764166660649225, -1.7065434213509496, -0.5050438469709504, -1.3054185775296043, -0.8068132489127746, -0.6382812011694322, -1.1627861693976371, -0.45401443972098127, -0.6137196147779603, -0.4977304132654839, -0.028586481599992053, -0.3072906036350176, -0.8829586176959779, -1.444927239489506, -0.0966882635820835, -0.5922128607740018, 0.6084011487364829, -0.6517359795265918, -0.2999219699247945, -1.2488097930212723, 1.199403408838386, -1.1695890709679937, 0.436954182465409, -1.1042173101212782, -0.6650977784109602, -0.5139643196272795, -0.5198433080158704, 0.0043002748331546275, -0.5518119098417992, 1.3441195820509282, -0.712948924486379, -0.018440170740114344, 0.36568005901789147, -0.5072588384978175, -1.1650625423904195, -0.38855801862147704, 0.30554976386664046, -0.6907406016864835, 1.5280992689860888, -0.10141847608741561, 0.47051948353017276, -0.5387045991248854, 0.978513599005968, -0.40786746694014064, 0.4800826365630705, 0.2240552203634309, -0.9595995086522562, -0.8351580826251421, 0.4753320019864212, -0.57881353707992, 1.0640441766510356, -0.5847533481883163, -0.36677168526134823, -0.11674188951656596, 0.5851323848134786, 0.5079904777408251, -0.41123866164618883, 0.7823550287478366, 0.036505126098621624, 0.5777693152445059, 0.1748309228565252] 
     11.95373386, 1.49162075, 2.35670273, 1.35477571, 1.31656080, 0.77090436, 2.50372935, 1.50731576, 3.13606237, 0.69078079, 2.52857643, 0.74337815, 1.08796489, 1.58097011, 0.95551967, 0.64109929, 1.36104436, 0.59842805, 0.57648283, 0.65896660, -0.11532119, 0.59460795, 0.21640488, -0.13532809, -0.77356783, -0.75705636, -0.86104391, -1.21680540, -0.84965088, -0.50123387, -0.77341816, 0.16767397, -0.35117713, 0.48721479, -1.72883081, -0.41177670, -0.78877603, -1.38727170, 2.10659633, -1.28714664, -0.41878699, -1.24054695, -0.24337773, -0.95375835, 0.48201088, -0.56333257, -0.74430699, -1.76416666, -1.70654342, -0.50504385, -1.30541858, -0.80681325, -0.63828120, -1.16278617, -0.45401444, -0.61371961, -0.49773041, -0.02858648, -0.30729060, -0.88295862, -1.44492724, -0.09668826, -0.59221286, 0.60840115, -0.65173598, -0.29992197, -1.24880979, 1.19940341, -1.16958907, 0.43695418, -1.10421731, -0.66509778, -0.51396432, -0.51984331, 0.00430027, -0.55181191, 1.34411958, -0.71294892, -0.01844017, 0.36568006, -0.50725884, -1.16506254, -0.38855802, 0.30554976, -0.69074060, 1.52809927, -0.10141848, 0.47051948, -0.53870460, 0.97851360, -0.40786747, 0.48008264, 0.22405522, -0.95959951, -0.83515808, 0.47533200, -0.57881354, 1.06404418, -0.58475335, -0.36677169, -0.11674189, 0.58513238, 0.50799048, -0.41123866, 0.78235503, 0.03650513, 0.57776932, 0.17483092 
  • Orange/testing/regression/results_tests_20/modules_tuning1.py.txt

    r9951 r10304  
    1 *** optimization  1: [0.9706992853681718]: 
    2 *** optimization  2: [0.9743207136103917]: 
    3 *** optimization  3: [0.9833880175537593]: 
    4 *** optimization  4: [0.987881777448925]: 
    5 *** optimization  5: [0.9889423897378024]: 
    6 *** optimization  10: [0.9869203192362646]: 
    7 *** optimization  15: [0.9884223913820658]: 
    8 *** optimization  20: [0.9780406731017164]: 
     1*** optimization  1: 0.97069929: 
     2*** optimization  2: 0.97432071: 
     3*** optimization  3: 0.98338802: 
     4*** optimization  4: 0.98788178: 
     5*** optimization  5: 0.98894239: 
     6*** optimization  10: 0.98692032: 
     7*** optimization  15: 0.98842239: 
     8*** optimization  20: 0.97804067: 
    99*** Optimal parameter: minSubset = 5 
    1010Optimal setting:  5 
    11 *** optimization  1: [0.9832190860215053]: 
    12 *** optimization  2: [0.9781989247311826]: 
    13 *** optimization  3: [0.9912679211469535]: 
    14 *** optimization  4: [0.9937656810035842]: 
    15 *** optimization  5: [0.9907504480286738]: 
    16 *** optimization  10: [0.9872647849462366]: 
    17 *** optimization  15: [0.9897692652329749]: 
    18 *** optimization  20: [0.9910506272401434]: 
     11*** optimization  1: 0.98321909: 
     12*** optimization  2: 0.97819892: 
     13*** optimization  3: 0.99126792: 
     14*** optimization  4: 0.99376568: 
     15*** optimization  5: 0.99075045: 
     16*** optimization  10: 0.98726478: 
     17*** optimization  15: 0.98976927: 
     18*** optimization  20: 0.99105063: 
    1919*** Optimal parameter: minSubset = 4 
    20 *** optimization  1: [0.9729637096774194]: 
    21 *** optimization  2: [0.9727867383512546]: 
    22 *** optimization  3: [0.9808624551971327]: 
    23 *** optimization  4: [0.9820990143369175]: 
    24 *** optimization  5: [0.9854368279569893]: 
    25 *** optimization  10: [0.9885685483870967]: 
    26 *** optimization  15: [0.9916263440860216]: 
    27 *** optimization  20: [0.986008064516129]: 
     20*** optimization  1: 0.97296371: 
     21*** optimization  2: 0.97278674: 
     22*** optimization  3: 0.98086246: 
     23*** optimization  4: 0.98209901: 
     24*** optimization  5: 0.98543683: 
     25*** optimization  10: 0.98856855: 
     26*** optimization  15: 0.99162634: 
     27*** optimization  20: 0.98600806: 
    2828*** Optimal parameter: minSubset = 15 
    29 *** optimization  1: [0.9802307347670249]: 
    30 *** optimization  2: [0.9830689964157706]: 
    31 *** optimization  3: [0.9824529569892473]: 
    32 *** optimization  4: [0.9896012544802868]: 
    33 *** optimization  5: [0.9847267025089605]: 
    34 *** optimization  10: [0.9896527777777778]: 
    35 *** optimization  15: [0.9874350358422939]: 
    36 *** optimization  20: [0.9743794802867383]: 
     29*** optimization  1: 0.98023073: 
     30*** optimization  2: 0.98306900: 
     31*** optimization  3: 0.98245296: 
     32*** optimization  4: 0.98960125: 
     33*** optimization  5: 0.98472670: 
     34*** optimization  10: 0.98965278: 
     35*** optimization  15: 0.98743504: 
     36*** optimization  20: 0.97437948: 
    3737*** Optimal parameter: minSubset = 10 
    38 *** optimization  1: [0.9682504480286739]: 
    39 *** optimization  2: [0.9763328853046596]: 
    40 *** optimization  3: [0.974834229390681]: 
    41 *** optimization  4: [0.980266577060932]: 
    42 *** optimization  5: [0.9795676523297492]: 
    43 *** optimization  10: [0.9769332437275986]: 
    44 *** optimization  15: [0.9773454301075268]: 
    45 *** optimization  20: [0.9740815412186381]: 
     38*** optimization  1: 0.96825045: 
     39*** optimization  2: 0.97633289: 
     40*** optimization  3: 0.97483423: 
     41*** optimization  4: 0.98026658: 
     42*** optimization  5: 0.97956765: 
     43*** optimization  10: 0.97693324: 
     44*** optimization  15: 0.97734543: 
     45*** optimization  20: 0.97408154: 
    4646*** Optimal parameter: minSubset = 4 
    47 *** optimization  1: [0.9640591397849462]: 
    48 *** optimization  2: [0.9741397849462365]: 
    49 *** optimization  3: [0.9783467741935483]: 
    50 *** optimization  4: [0.9872110215053763]: 
    51 *** optimization  5: [0.9868839605734767]: 
    52 *** optimization  10: [0.987806899641577]: 
    53 *** optimization  15: [0.9802083333333333]: 
    54 *** optimization  20: [0.9767137096774194]: 
     47*** optimization  1: 0.96405914: 
     48*** optimization  2: 0.97413978: 
     49*** optimization  3: 0.97834677: 
     50*** optimization  4: 0.98721102: 
     51*** optimization  5: 0.98688396: 
     52*** optimization  10: 0.98780690: 
     53*** optimization  15: 0.98020833: 
     54*** optimization  20: 0.97671371: 
    5555*** Optimal parameter: minSubset = 10 
    56 *** optimization  1: [0.9743570788530466]: 
    57 *** optimization  2: [0.9743370547143588]: 
    58 *** optimization  3: [0.977821071977178]: 
    59 *** optimization  4: [0.9755929522346574]: 
    60 *** optimization  5: [0.9834564497842148]: 
    61 *** optimization  10: [0.984518277741204]: 
    62 *** optimization  15: [0.9816068228366615]: 
    63 *** optimization  20: [0.9802781892326824]: 
     56*** optimization  1: 0.97435708: 
     57*** optimization  2: 0.97433705: 
     58*** optimization  3: 0.97782107: 
     59*** optimization  4: 0.97559295: 
     60*** optimization  5: 0.98345645: 
     61*** optimization  10: 0.98451828: 
     62*** optimization  15: 0.98160682: 
     63*** optimization  20: 0.98027819: 
    6464*** Optimal parameter: minSubset = 10 
    65 *** optimization  1: [0.9764701740911419]: 
    66 *** optimization  2: [0.9851986870016823]: 
    67 *** optimization  3: [0.9877503017335967]: 
    68 *** optimization  4: [0.9894257461048936]: 
    69 *** optimization  5: [0.9890922847633676]: 
    70 *** optimization  10: [0.9826276881720429]: 
    71 *** optimization  15: [0.9815133768561187]: 
    72 *** optimization  20: [0.9825131208397337]: 
     65*** optimization  1: 0.97647017: 
     66*** optimization  2: 0.98519869: 
     67*** optimization  3: 0.98775030: 
     68*** optimization  4: 0.98942575: 
     69*** optimization  5: 0.98909228: 
     70*** optimization  10: 0.98262769: 
     71*** optimization  15: 0.98151338: 
     72*** optimization  20: 0.98251312: 
    7373*** Optimal parameter: minSubset = 4 
    74 *** optimization  1: [0.980901680564699]: 
    75 *** optimization  2: [0.9916655694535879]: 
    76 *** optimization  3: [0.9883444426157559]: 
    77 *** optimization  4: [0.9884388029405311]: 
    78 *** optimization  5: [0.9908789591105258]: 
    79 *** optimization  10: [0.9896376910979445]: 
    80 *** optimization  15: [0.9890296979006656]: 
    81 *** optimization  20: [0.9841350760734402]: 
     74*** optimization  1: 0.98090168: 
     75*** optimization  2: 0.99166557: 
     76*** optimization  3: 0.98834444: 
     77*** optimization  4: 0.98843880: 
     78*** optimization  5: 0.99087896: 
     79*** optimization  10: 0.98963769: 
     80*** optimization  15: 0.98902970: 
     81*** optimization  20: 0.98413508: 
    8282*** Optimal parameter: minSubset = 2 
    83 *** optimization  1: [0.971771953405018]: 
    84 *** optimization  2: [0.9794041218637994]: 
    85 *** optimization  3: [0.9802464157706093]: 
    86 *** optimization  4: [0.9802710573476703]: 
    87 *** optimization  5: [0.9831070788530465]: 
    88 *** optimization  10: [0.9841397849462366]: 
    89 *** optimization  15: [0.9865120967741937]: 
    90 *** optimization  20: [0.9799551971326166]: 
     83*** optimization  1: 0.97177195: 
     84*** optimization  2: 0.97940412: 
     85*** optimization  3: 0.98024642: 
     86*** optimization  4: 0.98027106: 
     87*** optimization  5: 0.98310708: 
     88*** optimization  10: 0.98413978: 
     89*** optimization  15: 0.98651210: 
     90*** optimization  20: 0.97995520: 
    9191*** Optimal parameter: minSubset = 15 
    92 *** optimization  1: [0.9708422939068101]: 
    93 *** optimization  2: [0.9755510752688172]: 
    94 *** optimization  3: [0.9838418458781363]: 
    95 *** optimization  4: [0.9772222222222224]: 
    96 *** optimization  5: [0.984285394265233]: 
    97 *** optimization  10: [0.989247311827957]: 
    98 *** optimization  15: [0.987780017921147]: 
    99 *** optimization  20: [0.980741487455197]: 
     92*** optimization  1: 0.97084229: 
     93*** optimization  2: 0.97555108: 
     94*** optimization  3: 0.98384185: 
     95*** optimization  4: 0.97722222: 
     96*** optimization  5: 0.98428539: 
     97*** optimization  10: 0.98924731: 
     98*** optimization  15: 0.98778002: 
     99*** optimization  20: 0.98074149: 
    100100*** Optimal parameter: minSubset = 10 
    101101Untuned tree: 0.926 
    102102Tuned tree: 0.983 
    103 *** optimization  1: [0.9706992853681718]: 
    104 *** optimization  2: [0.9743207136103917]: 
    105 *** optimization  3: [0.9833880175537593]: 
    106 *** optimization  4: [0.987881777448925]: 
    107 *** optimization  5: [0.9889423897378024]: 
    108 *** optimization  10: [0.9869203192362646]: 
    109 *** optimization  15: [0.9884223913820658]: 
    110 *** optimization  20: [0.9780406731017164]: 
     103*** optimization  1: 0.97069929: 
     104*** optimization  2: 0.97432071: 
     105*** optimization  3: 0.98338802: 
     106*** optimization  4: 0.98788178: 
     107*** optimization  5: 0.98894239: 
     108*** optimization  10: 0.98692032: 
     109*** optimization  15: 0.98842239: 
     110*** optimization  20: 0.97804067: 
    111111*** Optimal parameter: ['split.continuousSplitConstructor.minSubset', 'split.discreteSplitConstructor.minSubset'] = 5 
    112112Optimal setting:  5.0 
  • Orange/testing/regression/results_tests_20/reference_basket.py.txt

    r10290 r10304  
    1010[], {"nobody":1.000, "expects":1.000, "the":1.000, "Spanish":1.000, "Inquisition":1.000} 
    1111[], {"amongst":1.000, "our":1.000, "weaponry":1.000, "are":1.000, "such":1.000, "diverse":1.000, "elements":1.000, "as":1.000, "fear":1.000, "surprise":1.000, "ruthless":1.000, "efficiency":1.000, "an":1.000, "almost":1.000, "fanatical":1.000, "devotion":1.000, "to":1.000, "the":1.000, "Pope":1.000, "and":1.000, "nice":1.000, "red":1.000, "uniforms":1.000, "oh damn":1.000} 
    12 [(-27, <orange.Value 'no'='1.000'>), (-26, <orange.Value 'four'='1.000'>), (-7, <orange.Value 'our'='1.000'>)] 
    13 [(Orange.feature.Continuous 'four', <orange.Value 'four'='1.000'>), (Orange.feature.Continuous 'no', <orange.Value 'no'='1.000'>), (Orange.feature.Continuous 'our', <orange.Value 'our'='1.000'>)] 
  • Orange/testing/regression/results_tests_20/reference_contingency6.py.txt

    r9951 r10304  
    664.69999980927 <2.000, 0.000, 0.000> 
    77 
    8 Contingency keys:  [4.300000190734863, 4.400000095367432, 4.5] 
     8Contingency keys:  ['4.30000019', '4.40000010', '4.50000000'] 
    99Contingency values:  [<1.000, 0.000, 0.000>, <3.000, 0.000, 0.000>, <1.000, 0.000, 0.000>] 
    10 Contingency items:  [(4.300000190734863, <1.000, 0.000, 0.000>), (4.400000095367432, <3.000, 0.000, 0.000>), (4.5, <1.000, 0.000, 0.000>)] 
     10Contingency items:  ['4.30000019, <1.000, 0.000, 0.000>', '4.40000010, <3.000, 0.000, 0.000>', '4.50000000, <1.000, 0.000, 0.000>'] 
    1111 
    12 Error:  invalid index (%5.3f) 
  • Orange/testing/regression/results_tutorial/accuracy4.py.txt

    r9953 r10304  
    1 1: [0.9083969465648855, 0.9007633587786259] 
    2 2: [0.9083969465648855, 0.9007633587786259] 
    3 3: [0.9083969465648855, 0.9007633587786259] 
    4 4: [0.9083969465648855, 0.9007633587786259] 
    5 5: [0.9083969465648855, 0.9007633587786259] 
    6 6: [0.9083969465648855, 0.9007633587786259] 
    7 7: [0.9083969465648855, 0.9007633587786259] 
    8 8: [0.9083969465648855, 0.9007633587786259] 
    9 9: [0.9083969465648855, 0.9007633587786259] 
    10 10: [0.9083969465648855, 0.9007633587786259] 
     11: ['0.908397', '0.900763'] 
     22: ['0.908397', '0.900763'] 
     33: ['0.908397', '0.900763'] 
     44: ['0.908397', '0.900763'] 
     55: ['0.908397', '0.900763'] 
     66: ['0.908397', '0.900763'] 
     77: ['0.908397', '0.900763'] 
     88: ['0.908397', '0.900763'] 
     99: ['0.908397', '0.900763'] 
     1010: ['0.908397', '0.900763'] 
    1111Classification accuracies: 
    1212bayes 0.908396946565 
  • Orange/testing/regression/results_tutorial/accuracy5.py.txt

    r9953 r10304  
    1 1: [0.8863636363636364, 0.9318181818181818] 
    2 2: [0.8863636363636364, 0.9318181818181818] 
    3 3: [0.8863636363636364, 0.9318181818181818] 
    4 4: [0.9318181818181818, 1.0] 
    5 5: [0.9545454545454546, 1.0] 
    6 6: [0.8837209302325582, 0.9767441860465116] 
    7 7: [0.9302325581395349, 0.9534883720930233] 
    8 8: [0.8837209302325582, 0.9069767441860465] 
    9 9: [0.8837209302325582, 1.0] 
    10 10: [0.9069767441860465, 0.9069767441860465] 
     11: ['0.886364', '0.931818'] 
     22: ['0.886364', '0.931818'] 
     33: ['0.886364', '0.931818'] 
     44: ['0.931818', '1.000000'] 
     55: ['0.954545', '1.000000'] 
     66: ['0.883721', '0.976744'] 
     77: ['0.930233', '0.953488'] 
     88: ['0.883721', '0.906977'] 
     99: ['0.883721', '1.000000'] 
     1010: ['0.906977', '0.906977'] 
    1111Classification accuracies: 
    1212bayes 0.903382663848 
  • Orange/testing/regression/results_tutorial/assoc2.py.txt

    r9953 r10304  
    115 most confident rules: 
    22conf    supp    lift    rule 
     31.000   0.585   1.015   drive-wheels=fwd -> engine-location=front 
     41.000   0.556   1.015   num-of-doors=four -> engine-location=front 
     51.000   0.541   1.015   fuel-type=gas drive-wheels=fwd -> engine-location=front 
     61.000   0.507   1.015   aspiration=std drive-wheels=fwd -> engine-location=front 
    371.000   0.478   1.015   fuel-type=gas aspiration=std drive-wheels=fwd -> engine-location=front 
    4 1.000   0.429   1.015   fuel-type=gas aspiration=std num-of-doors=four -> engine-location=front 
    5 1.000   0.507   1.015   aspiration=std drive-wheels=fwd -> engine-location=front 
    6 1.000   0.556   1.015   num-of-doors=four -> engine-location=front 
    7 1.000   0.449   1.015   aspiration=std num-of-doors=four -> engine-location=front 
    88 
    99Rules with confidence>0.800 and lift>1.100 
  • Orange/testing/regression/tests_20/modules_outlier1.py

    r9952 r10304  
    1010outlierDet = orngOutlier.OutlierDetection() 
    1111outlierDet.setExamples(data) 
    12 print outlierDet.zValues() 
     12print ", ".join("%.8f" % val for val in outlierDet.z_values()) 
  • Orange/testing/regression/tests_20/reference_basket.py

    r10113 r10304  
    2828 
    2929example = data[4] 
    30 printSortedMetas(example.getmetas()) 
    31 printSortedMetas(example.getmetas(orange.Variable), 1) 
     30#printSortedMetas(example.getmetas()) 
     31#printSortedMetas(example.getmetas(orange.Variable), 1) 
  • Orange/testing/regression/tests_20/reference_contingency6.py

    r9952 r10304  
    1414print 
    1515 
    16 print "Contingency keys: ", cont.keys()[:3] 
     16print "Contingency keys: ", ["%.8f" % key for key in cont.keys()[:3]] 
    1717print "Contingency values: ", cont.values()[:3] 
    18 print "Contingency items: ", cont.items()[:3] 
     18print "Contingency items: ", ["%.8f, %s" % (key, val) for key, val in cont.items()[:3]] 
    1919print 
    20  
    21 try: 
    22     midkey = (cont.keys()[0] + cont.keys()[1])/2.0 
    23     print "cont[%5.3f] =" % (midkey, cont[midkey]) 
    24 except Exception, v: 
    25     print "Error: ", v 
  • docs/reference/rst/code/statExample0.py

    r10003 r10304  
    88res = Orange.evaluation.testing.cross_validation(learners, voting) 
    99 
    10 print "CA =", Orange.evaluation.scoring.CA(res) 
    11 print "AUC = ", Orange.evaluation.scoring.AUC(res) 
     10print "CA =", ["%.6f" % r for r in Orange.evaluation.scoring.CA(res)] 
     11print "AUC = ", ["%.6f" % r for r in Orange.evaluation.scoring.AUC(res)] 
  • docs/reference/rst/code/svm-linear-weights.py

    r10118 r10304  
    88 
    99weights = svm.get_linear_svm_weights(classifier) 
    10 print sorted(weights.values()) 
     10print sorted("%.10f" % w for w in weights.values()) 
    1111 
    1212import pylab as plt 
  • docs/reference/rst/code/testing-example.py

    r10192 r10304  
    66 
    77cv = Orange.evaluation.testing.cross_validation(learners, iris, folds=5) 
    8 print Orange.evaluation.scoring.CA(cv) 
     8print ["%.4f" % score for score in Orange.evaluation.scoring.CA(cv)] 
    99 
  • docs/tutorial/rst/code/accuracy4.py

    r9374 r10304  
    1010 
    1111def accuracy(test_data, classifiers): 
    12     correct = [0.0]*len(classifiers) 
     12    correct = [0.0] * len(classifiers) 
    1313    for ex in test_data: 
    1414        for i in range(len(classifiers)): 
     
    2020 
    2121def test_rnd_sampling(data, learners, p=0.7, n=10): 
    22     acc = [0.0]*len(learners) 
     22    acc = [0.0] * len(learners) 
    2323    for i in range(n): 
    2424        selection = orange.MakeRandomIndices2(data, p) 
     
    2929            classifiers.append(l(train_data)) 
    3030        acc1 = accuracy(test_data, classifiers) 
    31         print "%d: %s" % (i+1, acc1) 
     31        print "%d: %s" % (i + 1, ["%.6f" % a for a in acc1]) 
    3232        for j in range(len(learners)): 
    3333            acc[j] += acc1[j] 
    3434    for j in range(len(learners)): 
    35         acc[j] = acc[j]/n 
     35        acc[j] = acc[j] / n 
    3636    return acc 
    3737 
    38 orange.setrandseed(0)     
     38orange.setrandseed(0) 
    3939# set up the learners 
    4040bayes = orange.BayesLearner() 
  • docs/tutorial/rst/code/accuracy5.py

    r9374 r10304  
    88 
    99def accuracy(test_data, classifiers): 
    10     correct = [0.0]*len(classifiers) 
     10    correct = [0.0] * len(classifiers) 
    1111    for ex in test_data: 
    1212        for i in range(len(classifiers)): 
     
    1818 
    1919def cross_validation(data, learners, k=10): 
    20     acc = [0.0]*len(learners) 
    21     selection= orange.MakeRandomIndicesCV(data, folds=k) 
     20    acc = [0.0] * len(learners) 
     21    selection = orange.MakeRandomIndicesCV(data, folds=k) 
    2222    for test_fold in range(k): 
    2323        train_data = data.select(selection, test_fold, negate=1) 
     
    2727            classifiers.append(l(train_data)) 
    2828        acc1 = accuracy(test_data, classifiers) 
    29         print "%d: %s" % (test_fold+1, acc1) 
     29        print "%d: %s" % (test_fold + 1, ["%.6f" % a for a in acc1]) 
    3030        for j in range(len(learners)): 
    3131            acc[j] += acc1[j] 
    3232    for j in range(len(learners)): 
    33         acc[j] = acc[j]/k 
     33        acc[j] = acc[j] / k 
    3434    return acc 
    3535 
  • docs/tutorial/rst/code/assoc2.py

    r9374 r10304  
    1212data = data.select(range(10)) 
    1313 
    14 rules = orange.AssociationRulesInducer(data, support = 0.4) 
     14rules = orange.AssociationRulesInducer(data, support=0.4) 
    1515 
    1616n = 5 
    1717print "%i most confident rules:" % (n) 
    18 orngAssoc.sort(rules, ["confidence"]) 
    19 orngAssoc.printRules(rules[0:n], ['confidence','support','lift']) 
     18orngAssoc.sort(rules, ["confidence", "support"]) 
     19orngAssoc.printRules(rules[0:n], ['confidence', 'support', 'lift']) 
    2020 
    2121conf = 0.8; lift = 1.1 
    2222print "\nRules with confidence>%5.3f and lift>%5.3f" % (conf, lift) 
    23 rulesC=rules.filter(lambda x: x.confidence>conf and x.lift>lift) 
     23rulesC = rules.filter(lambda x: x.confidence > conf and x.lift > lift) 
    2424orngAssoc.sort(rulesC, ['confidence']) 
    25 orngAssoc.printRules(rulesC, ['confidence','support','lift']) 
     25orngAssoc.printRules(rulesC, ['confidence', 'support', 'lift']) 
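Most of the script changes above presumably serve the same end: the regression suite compares printed output against these stored .txt references, and output that leans on full float repr() is fragile because the digits repr() produces vary with the Python version. A small illustrative check, with x as a hypothetical value (Python 2):

    x = 2.0 / 3
    print repr(x)      # '0.66666666666666663' on older interpreters, '0.6666666666666666' on newer ones
    print "%.8f" % x   # always '0.66666667'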