Files: 7 added, 6 deleted, 35 edited

  • MANIFEST.in

    r10365 r10401  
    66recursive-include Orange/orng *.cfg *.c 
    77 
    8 recursive-include source *.bat *.c *.cpp *.h *.hpp *.mak COPYRIGHT *.py *.txt *.sip *.defs *.cmake 
    9 prune source/orangeqt/build 
    10 prune source/*/px 
    11 prune source/*/ppp 
     8recursive-include source *.bat *.c *.cpp *.h *.hpp *.mak COPYRIGHT *.py *.txt *.sip *.defs *.cmake Makefile 
     9recursive-exclude source/orangeqt/build * 
     10 
     11prune source/orange/px 
     12prune source/orange/ppp 
     13 
     14prune source/orangeom/px 
     15prune source/orangene/ppp 
     16 
     17prune source/orangene/px 
     18prune source/orangene/ppp 
     19 
     20exclude source/orangeom/lib_vectors.cpp 
     21exclude source/orangene/lib_vectors.cpp 
    1222 
    1323recursive-include docs *.rst *.py *.png *.css *.txt Makefile 
  • Orange/OrangeWidgets/Unsupervised/OWDistanceFile.py

    r9671 r10398  
    1010import OWGUI 
    1111import orange 
     12import orngMisc 
    1213import exceptions 
    1314import os.path 
  • Orange/classification/rules.py

    r10379 r10407  
    13591359        for c2 in rule2.filter.conditions: 
    13601360            try: 
    1361                 if c1.position == c2.position and type(c1) == type(c2): 
    1362                     continue # same feature and type? 
    1363                 if isinstance(c1, Orange.data.filter.ValueFilterDiscrete): 
    1364                     if type(c1.values[0]) != type(c2.values[0]) or \ 
    1365                             c1.values[0] != c2.values[0]: 
    1366                         continue # same value? 
    1367                 if isinstance(c1, Orange.data.filter.ValueFilterContinuous): 
    1368                     if c1.oper != c2.oper or c1.ref != c2.ref: 
    1369                         continue # same operator? 
     1361                if not c1.position == c2.position: continue # same feature? 
     1362                if not type(c1) == type(c2): continue # same type of condition 
     1363                if type(c1) == Orange.core.ValueFilter_discrete: 
     1364                    if not type(c1.values[0]) == type(c2.values[0]): continue 
     1365                    if not c1.values[0] == c2.values[0]: continue # same value? 
     1366                if type(c1) == Orange.core.ValueFilter_continuous: 
     1367                    if not c1.oper == c2.oper: continue # same operator? 
     1368                    if not c1.ref == c2.ref: continue #same threshold? 
    13701369                found = True 
    13711370                break 
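
The rewritten block above checks, condition by condition, whether two rule conditions test the same feature, use the same filter type, and (for discrete and continuous filters) refer to the same value, operator and threshold. A minimal standalone sketch of that comparison, with a helper name of our own choosing::

    import Orange

    def same_condition(c1, c2):
        # both conditions must test the same feature with the same filter type
        if c1.position != c2.position or type(c1) != type(c2):
            return False
        # discrete filters must select the same value
        if type(c1) == Orange.core.ValueFilter_discrete:
            return type(c1.values[0]) == type(c2.values[0]) and \
                   c1.values[0] == c2.values[0]
        # continuous filters must use the same operator and threshold
        if type(c1) == Orange.core.ValueFilter_continuous:
            return c1.oper == c2.oper and c1.ref == c2.ref
        return True
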
  • Orange/clustering/__init__.py

    r9725 r10421  
    1 """ 
    2 .. index:: clustering 
    3  
    4 Everything about clustering, including agglomerative and hierarchical clustering. 
    5 """ 
    6  
    71from __future__ import with_statement 
    82 
  • Orange/clustering/hierarchical.py

    r10237 r10393  
    104104-------------- 
    105105 
    106 .. autofunction:: dendrogram_draw 
     106.. autofunction:: dendrogram_draw(file, cluster, attr_cluster=None, labels=None, data=None, width=None, height=None, tree_height=None, heatmap_width=None, text_width=None,  spacing=2, cluster_colors={}, color_palette=ColorPalette([(255, 0, 0), (0, 255, 0)]), maxv=None, minv=None, gamma=None, format=None) 
    107107 
    108108.. rubric:: Example 
     
    985985    """ A class for drawing dendrograms. 
    986986     
    987     ``dendrogram_draw`` function is a more convenient interface 
    988     to the functionality provided by this class and. 
     987    :obj:`dendrogram_draw` function is a more convenient interface 
     988    to the functionality provided by this class. 
    989989         
    990990    Example:: 
     
    11341134         
    11351135         
    1136 def dendrogram_draw(file, cluster, attr_cluster = None, labels=None, data=None, 
     1136def dendrogram_draw(file, cluster, attr_cluster=None, labels=None, data=None, 
    11371137                    width=None, height=None, tree_height=None, 
    11381138                    heatmap_width=None, text_width=None,  spacing=2, 
    1139                     cluster_colors={}, color_palette=ColorPalette([(255, 0, 0), (0, 255, 0)]), 
    1140                     maxv=None, minv=None, gamma=None, 
    1141                     format=None): 
     1139                    cluster_colors={}, 
     1140                    color_palette=ColorPalette([(255, 0, 0), (0, 255, 0)]), 
     1141                    maxv=None, minv=None, gamma=None, format=None): 
    11421142    """ Plot the dendrogram to ``file``. 
    11431143     
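
A minimal usage sketch of the signature shown above (assumes the bundled "iris" data set, the convenience function :obj:`clustering` for building the cluster tree, and that the output format is inferred from the file name)::

    import Orange

    data = Orange.data.Table("iris")
    # keep a small subsample so the labels stay readable
    sample = Orange.data.Table(data.domain,
                               [data[i] for i in range(0, len(data), 10)])
    root = Orange.clustering.hierarchical.clustering(sample)
    Orange.clustering.hierarchical.dendrogram_draw(
        "dendrogram.png", root, data=sample,
        labels=[str(d.get_class()) for d in sample])
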
  • Orange/clustering/kmeans.py

    r9994 r10393  
    99 
    1010 
    11 .. autoclass:: Orange.clustering.kmeans.Clustering 
    12    :members: 
     11.. autoclass:: Orange.clustering.kmeans.Clustering(data=None, centroids=3, maxiters=None, minscorechange=None, stopchanges=0, nstart=1, initialization=init_random, distance=Orange.distance.Euclidean, scoring=score_distance_to_centroids, inner_callback=None, outer_callback=None) 
     12    :members: 
     13    :exclude-members: __init__ 
     14 
     15    .. automethod:: __init__(data=None, centroids=3, maxiters=None, minscorechange=None, stopchanges=0, nstart=1, initialization=init_random, distance=Orange.distance.Euclidean, scoring=score_distance_to_centroids, inner_callback=None, outer_callback=None) 
     16 
    1317 
    1418Examples 
     
    394398                 stopchanges=0, nstart=1, initialization=init_random, 
    395399                 distance=Orange.distance.Euclidean, 
    396                  scoring=score_distance_to_centroids, inner_callback = None, 
    397                  outer_callback = None): 
    398         """ 
    399         :param data: Data instances to be clustered. If not None, clustering will be executed immediately after initialization unless initialize_only=True. 
    400         :type data: :class:`orange.ExampleTable` or None 
     400                 scoring=score_distance_to_centroids, inner_callback=None, 
     401                 outer_callback=None): 
     402        """ 
     403        :param data: Data instances to be clustered. If not None, clustering will be executed immediately after initialization unless ``initialize_only=True``. 
     404        :type data: :class:`~Orange.data.Table` or None 
    401405        :param centroids: either specify a number of clusters or provide a list of examples that will serve as clustering centroids. 
    402         :type centroids: integer or a list of :class:`orange.Example` 
     406        :type centroids: :obj:`int` or :obj:`list` of :class:`~Orange.data.Instance` 
    403407        :param nstart: If greater than one, nstart runs of the clustering algorithm will be executed, returning the clustering with the best (lowest) score. 
    404         :type nstart: integer 
     408        :type nstart: int 
    405409        :param distance: an example distance constructor, which measures the distance between two instances. 
    406         :type distance: :class:`Orange.distance.DistanceConstructor` 
    407         :param initialization: a function to select centroids given data instances, k and a example distance function. This module implements different approaches (:func:`init_random`, :func:`init_diversity`, :class:`init_hclustering`).  
     410        :type distance: :class:`~Orange.distance.DistanceConstructor` 
      410        :type distance: :class:`~Orange.distance.DistanceConstructor` 
      411        :param initialization: a function to select centroids given data instances, k and an example distance function. This module implements different approaches (:obj:`init_random`, :obj:`init_diversity`, :obj:`init_hclustering`).  
    408412        :param scoring: a function that takes clustering object and returns the clustering score. It could be used, for instance, in procedure that repeats the clustering nstart times, returning the clustering with the lowest score. 
    409413        :param inner_callback: invoked after every clustering iteration. 
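
A minimal sketch of the constructor documented above (assumes the bundled "iris" data set)::

    import Orange

    data = Orange.data.Table("iris")
    km = Orange.clustering.kmeans.Clustering(data, centroids=3)
    print(km.clusters[:10])   # cluster membership of the first ten instances
    print(km.score)           # score of the final clustering
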
  • Orange/evaluation/reliability.py

    r9936 r10393  
    584584     
    585585    :param bagv: Instance of Bagging Variance estimator. 
    586     :type bagv: :class:`Orange.evaluation.reliability.BaggingVariance` 
     586    :type bagv: :class:`BaggingVariance` 
    587587     
    588588    :param cnk: Instance of CNK estimator. 
    589     :type cnk: :class:`Orange.evaluation.reliability.CNeighbours` 
     589    :type cnk: :class:`CNeighbours` 
    590590     
    591591    :rtype: :class:`Orange.evaluation.reliability.BaggingVarianceCNeighboursClassifier` 
     
    659659    :param box_learner: Learner we want to wrap into a reliability estimation 
    660660        classifier. 
    661     :type box_learner: learner 
     661    :type box_learner: :obj:`~Orange.classification.Learner` 
    662662     
    663663    :param estimators: List of different reliability estimation methods we 
    664664                       want to use on the chosen learner. 
    665     :type estimators: list of reliability estimators 
     665    :type estimators: :obj:`list` of reliability estimators 
    666666     
    667667    :param name: Name of this reliability learner 
     
    671671    """ 
    672672    def __init__(self, box_learner, name="Reliability estimation", 
    673                  estimators = [SensitivityAnalysis(), 
    674                                LocalCrossValidation(), 
    675                                BaggingVarianceCNeighbours(), 
    676                                Mahalanobis(), 
    677                                MahalanobisToCenter() 
    678                                ], 
     673                 estimators=[SensitivityAnalysis(), 
     674                             LocalCrossValidation(), 
     675                             BaggingVarianceCNeighbours(), 
     676                             Mahalanobis(), 
     677                             MahalanobisToCenter()], 
    679678                 **kwds): 
    680679        self.__dict__.update(kwds) 
     
    691690        :type instances: Orange.data.Table 
    692691        :param weight: Id of meta attribute with weights of instances 
    693         :type weight: integer 
     692        :type weight: int 
    694693        :rtype: :class:`Orange.evaluation.reliability.Classifier` 
    695694        """ 
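
A minimal sketch of wrapping a learner with the reliability-estimation learner described above (assumes the bundled "housing" data set and that the wrapper class is :obj:`Orange.evaluation.reliability.Learner`)::

    import Orange
    from Orange.evaluation import reliability

    data = Orange.data.Table("housing")
    knn = Orange.classification.knn.kNNLearner()
    rel_learner = reliability.Learner(knn, estimators=[reliability.Mahalanobis()])
    classifier = rel_learner(data)
    print(classifier(data[0]))   # plain prediction by the wrapped learner
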
  • Orange/evaluation/testing.py

    r10234 r10414  
    1212class TestedExample: 
    1313    """ 
    14     TestedExample stores predictions of different classifiers for a single testing example. 
    15  
    16     :var classes: A list of predictions of type Value, one for each classifier. 
    17     :var probabilities: A list of probabilities of classes, one for each classifier. 
    18     :var iteration_number: Iteration number (e.g. fold) in which the TestedExample was created/tested. 
    19     :var actual_class: The correct class of the example 
    20     :var weight: Example's weight. Even if the example set was not weighted, this attribute is present and equals 1.0. 
     14    TestedExample stores predictions of different classifiers for a 
     15    single testing data instance. 
     16 
     17    .. attribute:: classes 
     18 
     19        A list of predictions of type Value, one for each classifier. 
     20 
     21    .. attribute:: probabilities 
     22 
     23        A list of probabilities of classes, one for each classifier. 
     24 
     25    .. attribute:: iteration_number 
     26 
     27        Iteration number (e.g. fold) in which the TestedExample was 
     28        created/tested. 
     29 
      30    .. attribute:: actual_class 
     31 
     32        The correct class of the example 
     33 
      34    .. attribute:: weight 
     35 
     36        Instance's weight; 1.0 if data was not weighted 
    2137    """ 
    2238 
     
    3753 
    3854    def add_result(self, aclass, aprob): 
    39         """Appends a new result (class and probability prediction by a single classifier) to the classes and probabilities field.""" 
     55        """Append a new result (class and probability prediction by a single classifier) to the classes and probabilities field.""" 
    4056     
    4157        if isinstance(aclass, (list, tuple)): 
     
    5066 
    5167    def set_result(self, i, aclass, aprob): 
    52         """Sets the result of the i-th classifier to the given values.""" 
     68        """Set the result of the i-th classifier to the given values.""" 
    5369        if isinstance(aclass, (list, tuple)): 
    5470            self.classes[i] = aclass 
     
    7288    ``ExperimentResults`` stores results of one or more repetitions of 
    7389    some test (cross validation, repeated sampling...) under the same 
    74     circumstances. 
    75  
    76     :var results: A list of instances of :obj:`TestedExample`, one for each example in the dataset. 
    77     :var classifiers: A list of classifiers, one element for each repetition (eg. fold). Each element is a list 
    78       of classifiers, one for each learner. This field is used only if storing is enabled by ``storeClassifiers=1``. 
    79     :var number_of_iterations: Number of iterations. This can be the number of folds (in cross validation) 
    80       or the number of repetitions of some test. :obj:`TestedExample`'s attribute ``iteration_number`` should 
    81       be in range ``[0, number_of_iterations-1]``. 
    82     :var number_of_learners: Number of learners. Lengths of lists classes and probabilities in each :obj:`TestedExample` 
    83       should equal ``number_of_learners``. 
    84     :var loaded: If the experimental method supports caching and there are no obstacles for caching (such as unknown 
    85       random seeds), this is a list of boolean values. Each element corresponds to a classifier and tells whether the 
    86       experimental results for that classifier were computed or loaded from the cache. 
    87     :var weights: A flag telling whether the results are weighted. If ``False``, weights are still present 
    88       in :obj:`TestedExample`, but they are all ``1.0``. Clear this flag, if your experimental procedure ran on weighted 
    89       testing examples but you would like to ignore the weights in statistics. 
     90    circumstances. Instances of this class are constructed by sampling 
     91    and testing functions from module :obj:`Orange.evaluation.testing` 
     92    and used by methods in module :obj:`Orange.evaluation.scoring`. 
     93 
     94    .. attribute:: results 
     95 
     96        A list of instances of :obj:`TestedExample`, one for each 
     97        example in the dataset. 
     98 
     99    .. attribute:: number_of_iterations 
     100 
     101        Number of iterations. This can be the number of folds (in 
     102        cross validation) or the number of repetitions of some 
     103        test. :obj:`TestedExample`'s attribute ``iteration_number`` 
     104        should be in range ``[0, number_of_iterations-1]``. 
     105 
     106    .. attribute:: number_of_learners 
     107 
     108        Number of learners. Lengths of lists classes and probabilities 
     109        in each :obj:`TestedExample` should equal 
     110        ``number_of_learners``. 
     111 
     112    .. attribute:: classifier_names 
     113 
     114        Stores the names of the classifiers. 
     115 
     116    .. attribute:: classifiers 
     117 
     118        A list of classifiers, one element for each iteration of 
     119        sampling and learning (eg. fold). Each element is a list of 
     120        classifiers, one for each learner. For instance, 
     121        ``classifiers[2][4]`` refers to the 3rd repetition, 5th 
     122        learning algorithm. 
     123 
     124        Note that functions from :obj:`~Orange.evaluation.testing` 
      125        only store classifiers if enabled by setting 
     126        ``storeClassifiers`` to ``1``. 
     127 
     128    .. 
     129        .. attribute:: loaded 
     130 
     131            If the experimental method supports caching and there are no 
     132            obstacles for caching (such as unknown random seeds), this is a 
     133            list of boolean values. Each element corresponds to a classifier 
     134            and tells whether the experimental results for that classifier 
     135            were computed or loaded from the cache. 
     136 
     137    .. attribute:: base_class 
     138 
     139       The reference class for measures like AUC. 
     140 
     141    .. attribute:: class_values 
     142 
     143        The list of class values. 
     144 
     145    .. attribute:: weights 
     146 
     147        A flag telling whether the results are weighted. If ``False``, 
     148        weights are still present in :obj:`TestedExample`, but they 
     149        are all ``1.0``. Clear this flag, if your experimental 
     150        procedure ran on weighted testing examples but you would like 
     151        to ignore the weights in statistics. 
    90152    """ 
    91153    @deprecated_keywords({"classifierNames": "classifier_names", 
     
    198260            preprocessors=(), random_generator=0, callback=None, 
    199261            store_classifiers=False, store_examples=False): 
    200         """Perform cross validation with specified number of folds. 
    201  
    202         :param learners: list of learners to be tested 
    203         :param examples: data table on which the learners will be tested 
    204         :param folds: number of folds to perform 
    205         :param stratified: sets, whether indices should be stratified 
    206         :param preprocessors: a list of preprocessors to be used on data. 
    207         :param random_generator: :obj:`Orange.misc.RandomGenerator` object. 
    208         :param callback: a function that will be called after each fold is 
    209                computed. 
    210         :param store_classifiers: if True, classifiers will be accessible in 
    211                test_results. 
    212         :param store_examples: if True, examples will be accessible in 
    213                test_results. 
     262        """Cross validation test with specified number of folds. 
     263 
     264        :param learners: list of learning algorithms 
     265        :param examples: data instances used for training and testing 
     266        :param folds: number of folds 
     267        :param stratified: tells whether to stratify the sampling 
     268        :param preprocessors: a list of preprocessors to be used on data (obsolete) 
     269        :param random_generator: random seed or generator (see above) 
     270        :param callback: a function that is called after finishing each fold 
     271        :param store_classifiers: if ``True``, classifiers are stored in results 
     272        :param store_examples: if ``True``, examples are stored in results 
    214273        :return: :obj:`ExperimentResults` 
    215274        """ 
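
A minimal sketch of a cross-validation run with the arguments documented above, using the module-level convenience function (assumes the bundled "voting" data set)::

    import Orange

    data = Orange.data.Table("voting")
    learners = [Orange.classification.bayes.NaiveLearner(),
                Orange.classification.majority.MajorityLearner()]
    res = Orange.evaluation.testing.cross_validation(learners, data, folds=10)
    print(Orange.evaluation.scoring.CA(res))   # one accuracy per learner
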
     
    234293    def leave_one_out(self, learners, examples, preprocessors=(), 
    235294            callback=None, store_classifiers=False, store_examples=False): 
    236         """Perform leave-one-out evaluation of learners on a data set. 
    237  
    238         :param learners: list of learners to be tested 
    239         :param examples: data table on which the learners will be tested 
    240         :param preprocessors: a list of preprocessors to be used on data. 
    241         :param callback: a function that will be called after each fold is 
    242                computed. 
    243         :param store_classifiers: if True, classifiers will be accessible in 
    244                test_results. 
    245         :param store_examples: if True, examples will be accessible in 
    246                test_results. 
     295        """Leave-one-out evaluation of learning algorithms. 
     296 
     297        :param learners: list of learning algorithms 
     298        :param examples: data instances used for training and testing 
     299        :param preprocessors: a list of preprocessors (obsolete) 
     300        :param callback: a function that is called after finishing each fold 
     301        :param store_classifiers: if ``True``, classifiers are stored in results 
     302        :param store_examples: if ``True``, examples are stored in results 
    247303        :return: :obj:`ExperimentResults` 
    248304        """ 
     
    257313                          "pps":"preprocessors"}) 
    258314    def test_with_indices(self, learners, examples, indices, preprocessors=(), 
    259             callback=None, store_classifiers=False, store_examples=False, 
    260             **kwargs): 
     315            callback=None, store_classifiers=False, store_examples=False): 
    261316        """ 
    262317        Perform a cross-validation-like test. Examples for each fold are 
    263318        selected based on given indices. 
    264319 
    265         :param learners: list of learners to be tested 
    266         :param examples: data table on which the learners will be tested 
    267         :param indices: a list of integers that defines, which examples will be 
    268                used for testing in each fold. The number of indices should be 
    269                equal to the number of examples. 
    270         :param preprocessors: a list of preprocessors to be used on data. 
    271         :param callback: a function that will be called after each fold is 
    272                computed. 
    273         :param store_classifiers: if True, classifiers will be accessible in test_results. 
    274         :param store_examples: if True, examples will be accessible in test_results. 
     320        :param learners: list of learning algorithms 
     321        :param examples: data instances used for training and testing 
     322        :param indices: a list of integer indices that sort examples into folds; each index corresponds to an example from ``examples`` 
     323        :param preprocessors: a list of preprocessors (obsolete) 
     324        :param callback: a function that is called after each fold 
     325        :param store_classifiers: if ``True``, classifiers are stored in results 
     326        :param store_examples: if ``True``, examples are stored in results 
    275327        :return: :obj:`ExperimentResults` 
    276328        """ 
     
    279331            raise ValueError("Test data set with no examples") 
    280332        test_type = self.check_test_type(examples, learners) 
    281         if "cache" in kwargs: 
    282             raise ValueError("This feature is no longer supported.") 
    283333 
    284334        niterations = max(indices)+1 
     
    310360 
    311361    def one_fold_with_indices(self, learners, examples, fold, indices, preprocessors=(), weight=0): 
    312         """Perform one fold of cross-validation like procedure using provided indices.""" 
      362        """Similar to :obj:`test_with_indices` except that it performs a single fold of cross-validation, given by argument ``fold``.""" 
    313363        learn_set = examples.selectref(indices, fold, negate=1) 
    314364        test_set = examples.selectref(indices, fold, negate=0) 
     
    338388                                     callback=None, store_classifiers=False, store_examples=False): 
    339389        """ 
    340         Perform a test where learners are trained and tested on the same data. 
    341  
    342         :param learners: list of learners to be tested 
    343         :param examples: data table on which the learners will be tested 
    344         :param preprocessors: a list of preprocessors to be used on data. 
    345         :param callback: a function that will be called after each fold is computed. 
    346         :param store_classifiers: if True, classifiers will be accessible in test_results. 
    347         :param store_examples: if True, examples will be accessible in test_results. 
     390        Train learning algorithms and test them on the same data. 
     391 
     392        :param learners: list of learning algorithms 
     393        :param examples: data instances used for training and testing 
     394        :param preprocessors: a list of preprocessors (obsolete) 
     395        :param callback: a function that is called after each learning 
     396        :param store_classifiers: if ``True``, classifiers are stored in results 
     397        :param store_examples: if ``True``, examples are stored in results 
    348398        :return: :obj:`ExperimentResults` 
    349399        """ 
     
    386436                                    callback=None, store_classifiers=False, store_examples=False): 
    387437        """ 
    388         Perform a test, where learners are trained on one dataset and tested 
    389         on another. 
    390  
    391         :param learners: list of learners to be tested 
    392         :param learn_set: a dataset used for training 
    393         :param test_set: a dataset used for testing 
    394         :param preprocessors: a list of preprocessors to be used on data. 
    395         :param callback: a function that is be called after each classifier is computed. 
    396         :param store_classifiers: if True, classifiers will be accessible in test_results. 
    397         :param store_examples: if True, examples will be accessible in test_results. 
      438        Train learning algorithms on one data set and test them on another. 
     439 
     440        :param learners: list of learning algorithms 
     441        :param learn_set: training instances 
     442        :param test_set: testing instances 
     443        :param preprocessors: a list of preprocessors (obsolete) 
     444        :param callback: a function that is called after each learning 
     445        :param store_classifiers: if ``True``, classifiers are stored in results 
     446        :param store_examples: if ``True``, examples are stored in results 
    398447        :return: :obj:`ExperimentResults` 
    399448        """ 
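
A minimal sketch of evaluating on a separate test set with the function documented above (the 70/30 split via :obj:`SubsetIndices2` is an assumption; any pair of data tables works)::

    import Orange

    data = Orange.data.Table("voting")
    indices = Orange.data.sample.SubsetIndices2(p0=0.7)(data)
    learn_set = data.select(indices, 0)
    test_set = data.select(indices, 1)
    res = Orange.evaluation.testing.learn_and_test_on_test_data(
        [Orange.classification.bayes.NaiveLearner()], learn_set, test_set)
    print(Orange.evaluation.scoring.CA(res))
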
     
    436485                   callback=None, store_classifiers=False, store_examples=False): 
    437486        """ 
    438         Perform a test, where learners are trained and tested on different data sets. Training and test sets are 
    439         generated by proportionally splitting examples. 
    440  
    441         :param learners: list of learners to be tested 
    442         :param examples: a dataset used for evaluation 
    443         :param learning_proportion: proportion of examples to be used for training 
    444         :param times: number of test repetitions 
    445         :param stratification: use stratification when constructing train and test sets. 
    446         :param preprocessors: a list of preprocessors to be used on data. 
    447         :param callback: a function that is be called after each classifier is computed. 
    448         :param store_classifiers: if True, classifiers will be accessible in test_results. 
    449         :param store_examples: if True, examples will be accessible in test_results. 
      487        Iteratively split the data into training and testing sets, and train and test the learning algorithms. 
     488 
     489        :param learners: list of learning algorithms 
     490        :param examples: data instances used for training and testing 
     491        :param learning_proportion: proportion of data used for training 
     492        :param times: number of iterations 
     493        :param stratification: use stratified sampling 
     494        :param preprocessors: a list of preprocessors (obsolete) 
     495        :param random_generator: random seed or generator (see above) 
     496        :param callback: a function that is called after each fold 
     497        :param store_classifiers: if ``True``, classifiers are stored in results 
     498        :param store_examples: if ``True``, examples are stored in results 
    450499        :return: :obj:`ExperimentResults` 
    451500        """ 
     
    499548        models are trained on different portions of the training data. 
    500549 
    501         :param learners: list of learners to be tested 
    502         :param examples: a dataset used for evaluation 
    503         :param cv_indices: indices used for crossvalidation 
    504         :param proportion_indices: indices for proportion selection 
    505         :param proportions: proportions of train data to be used 
    506         :param preprocessors: a list of preprocessors to be used on data. 
    507         :param callback: a function that is be called after each classifier is computed. 
     550        :param learners: list of learning algorithms 
     551        :param examples: data instances used for training and testing 
     552        :param cv_indices: indices used for cross validation (leave ``None`` for 10-fold CV) 
     553        :param proportion_indices: indices for proportion selection (leave ``None`` to let the function construct the folds) 
     554        :param proportions: list of proportions of data used for training 
     555        :param preprocessors: a list of preprocessors (obsolete) 
     556        :param random_generator: random seed or generator (see above) 
      557        :param callback: a function that is called after each learning 
    508558        :return: list of :obj:`ExperimentResults` 
    509559        """ 
     
    540590                       random_generator=0, callback=None): 
    541591        """ 
    542         Compute a learning curve where each cross-validation has given number of folds 
    543         and models are trained on specified proportion of training data. 
    544  
    545         :param learners: list of learners to be tested 
    546         :param examples: a dataset used for evaluation 
     592        Compute a learning curve using multiple cross-validations where 
     593        models are trained on different portions of the training data. 
     594        Similar to :obj:`learning_curve` except for simpler arguments. 
     595 
     596        :param learners: list of learning algorithms 
     597        :param examples: data instances used for training and testing 
    547598        :param folds: number of folds for cross-validation 
    548         :param proportions: proportions of train data to be used 
    549         :param preprocessors: a list of preprocessors to be used on data. 
    550         :param callback: a function that is called after each classifier is computed. 
     599        :param proportions: list of proportions of data used for training 
     600        :param stratification: use stratified sampling 
     601        :param preprocessors: a list of preprocessors (obsolete) 
     602        :param random_generator: random seed or generator (see above) 
      603        :param callback: a function that is called after each learning 
    551604        :return: list of :obj:`ExperimentResults` 
    552605        """ 
     
    566619        """ 
    567620        Compute a learning curve given two datasets. Models are learned on 
    568         proportion of the first dataset and then used to make predictions for 
    569         the second dataset. 
    570  
    571         :param learners: list of learners to be tested 
    572         :param learn_set: a dataset used for evaluation 
    573         :param test_set: a dataset used for evaluation 
    574         :param proportions: proportions of train data to be used 
    575         :param preprocessors: a list of preprocessors to be used on data. 
      621        a proportion of the first dataset and then tested on the second. 
     622 
     623        :param learners: list of learning algorithms 
     624        :param learn_set: training data 
     625        :param test_set: testing data 
     626        :param times: number of iterations 
      627        :param stratification: use stratified sampling 
     628        :param proportions: a list of proportions of training data to be used 
     629        :param preprocessors: a list of preprocessors (obsolete) 
     630        :param random_generator: random seed or generator (see above) 
     631        :param store_classifiers: if ``True``, classifiers are stored in results 
     632        :param store_examples: if ``True``, examples are stored in results 
    576633        :return: list of :obj:`ExperimentResults` 
    577634        """ 
     
    615672    def test_on_data(self, classifiers, examples, store_classifiers=False, store_examples=False): 
    616673        """ 
    617         Test classifiers on examples 
    618  
    619         :param classifiers: classifiers to test 
    620         :param examples: examples to test on 
    621         :param store_classifiers: if True, classifiers will be accessible in test_results. 
    622         :param store_examples: if True, examples will be accessible in test_results. 
     674        Test classifiers on the given data 
     675 
     676        :param classifiers: a list of classifiers 
     677        :param examples: testing data 
     678        :param store_classifiers: if ``True``, classifiers are stored in results 
     679        :param store_examples: if ``True``, examples are stored in results 
    623680        """ 
    624681 
     
    687744     
    688745    def _preprocess_data(self, learn_set, test_set, preprocessors): 
    689         """Apply preprocessors to learn and test dataset""" 
     746        """Apply preprocessors to learn and test dataset (obsolete)""" 
    690747        for p_type, preprocessor in preprocessors: 
    691748            if p_type == "B": 
  • Orange/multilabel/mlknn.py

    r9994 r10417  
    66*************************************** 
    77 
    8 ML-kNN Classification is a kind of adaptation method for multi-label classification. 
    9 It is an adaptation of the kNN lazy learning algorithm for multi-label data. 
    10 In essence, ML-kNN uses the kNN algorithm independently for each label :math:`l`. 
    11 It finds the k nearest examples to the test instance and considers those that are 
    12 labeled at least with :math:`l` as positive and the rest as negative. 
    13 Actually this method follows the paradigm of Binary Relevance (BR). What mainly 
    14 differentiates this method from BR is the use of prior probabilities. ML-kNN has also 
    15 the capability of producing a ranking of the labels as an output. 
    16 For more information, see Zhang, M. and Zhou, Z. 2007. `ML-KNN: A lazy learning 
    17 approach to multi-label learning <http://dx.doi.org/10.1016/j.patcog.2006.12.019>`_.  
    18 Pattern Recogn. 40, 7 (Jul. 2007), 2038-2048.   
      8ML-kNN Classification is an adaptation of kNN for multi-label 
     9classification.  In essence, ML-kNN uses the kNN algorithm 
     10independently for each label :math:`l`.  It finds the k nearest 
     11examples to the test instance and considers those that are labeled at 
     12least with :math:`l` as positive and the rest as negative.  What 
     13mainly differentiates this method from other binary relevance (BR) 
     14methods is the use of prior probabilities. ML-kNN can also rank labels. 
     15 
     16For more information, see Zhang, M. and Zhou, Z. 2007. `ML-KNN: A lazy 
     17learning approach to multi-label learning 
     18<http://dx.doi.org/10.1016/j.patcog.2006.12.019>`_.  Pattern 
     19Recogn. 40, 7 (Jul. 2007), 2038-2048. 
    1920 
    2021.. index:: ML-kNN Learner 
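
A minimal usage sketch of the learner described above (the "emotions" multi-label data set name is an assumption; any multi-label table works)::

    import Orange

    data = Orange.data.Table("emotions")
    learner = Orange.multilabel.MLkNNLearner(k=5)
    classifier = learner(data)
    print(classifier(data[0]))   # one prediction per label
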
     
    5051class MLkNNLearner(_multiknn.MultikNNLearner): 
    5152    """ 
    52     Class implementing the ML-kNN (Multi-Label k Nearest Neighbours) algorithm. The class is based on the  
    53     pseudo-code made available by the authors. 
     53    Class implementing the ML-kNN (Multi-Label k Nearest Neighbours) 
     54    algorithm. The class is based on the pseudo-code made available by 
     55    the authors. 
    5456     
    5557    The pseudo code of ML-kNN: 
  • Orange/multilabel/multiknn.py

    r9936 r10417  
    5454    def __new__(cls, k=1, **argkw): 
    5555        """ 
    56         Constructor of MultikNNLearner 
     56        Constructor for MultikNNLearner 
    5757                 
    5858        :param k: number of nearest neighbors used in classification 
  • Orange/multitarget/__init__.py

    r10331 r10420  
    2929    :lines: 1-6 
    3030 
    31 Multi-target learners can be used to build prediction models (classifiers) 
     31Multi-target learners can build prediction models (classifiers) 
    3232which then predict (multiple) class values for a new instance (continuation of 
    3333:download:`multitarget.py <code/multitarget.py>`): 
     
    9999class MultitargetClassifier(Orange.classification.Classifier): 
    100100    """ 
    101     Multitarget classifier returning a list of predictions from each 
     101    Multitarget classifier that returns a list of predictions from each 
    102102    of the independent base classifiers. 
    103103 
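
A minimal sketch of building such a model (assumes the "multitarget-synthetic" data set used elsewhere in the multitarget documentation and the :obj:`MultitargetLearner` wrapper)::

    import Orange

    data = Orange.data.Table("multitarget-synthetic")
    learner = Orange.multitarget.MultitargetLearner(
        Orange.classification.tree.TreeLearner())
    classifier = learner(data)
    print(classifier(data[0]))   # one predicted value per class variable
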
  • Orange/preprocess/__init__.py

    r10238 r10393  
    11""" 
    2 .. autoclass:: Preprocessor_discretizeEntropy 
     2.. autoclass:: Preprocessor_discretizeEntropy(method=Orange.feature.discretization.Entropy()) 
    33 
    44.. autoclass:: Preprocessor_removeContinuous 
     
    1010.. autoclass:: Preprocessor_impute 
    1111 
    12 .. autoclass:: Preprocessor_featureSelection 
     12.. autoclass:: Preprocessor_featureSelection(measure=Orange.feature.scoring.Relief(), filter=None, limit=10) 
    1313 
    1414.. autofunction:: bestP 
     
    156156 
    157157import orange 
     158import Orange 
    158159from Orange.misc import _orange__new__, _orange__reduce__ 
    159160 
     
    168169    __reduce__ = _orange__reduce__ 
    169170     
    170     def __init__(self, method=orange.EntropyDiscretization()): 
     171    def __init__(self, method=Orange.feature.discretization.Entropy()): 
    171172        self.method = method 
    172         assert(isinstance(method, orange.EntropyDiscretization)) 
     173        assert(isinstance(method, Orange.feature.discretization.Entropy)) 
    173174         
    174175    def __call__(self, data, wightId=0): 
     
    274275    bestP = staticmethod(bestP) 
    275276     
    276     def __init__(self, measure=orange.MeasureAttribute_relief(), filter=None, limit=10): 
     277    def __init__(self, measure=Orange.feature.scoring.Relief(), filter=None, limit=10): 
    277278        self.measure = measure 
    278279        self.filter = filter if filter is not None else self.bestN 
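
A minimal sketch of the two preprocessors whose signatures are documented above (assumes that calling a preprocessor on a data table returns a new, preprocessed table)::

    import Orange
    from Orange.preprocess import Preprocessor_discretizeEntropy, \
        Preprocessor_featureSelection

    data = Orange.data.Table("iris")
    disc_data = Preprocessor_discretizeEntropy()(data)        # entropy-based discretization
    top2_data = Preprocessor_featureSelection(limit=2)(data)  # keep the two best-scored features
    print(top2_data.domain)
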
  • Orange/projection/mds.py

    r10194 r10393  
    1717.. autoclass:: Orange.projection.mds.MDS 
    1818   :members: 
    19    :exclude-members: Torgerson, get_distance, get_stress 
     19   :exclude-members: Torgerson, get_distance, get_stress, calc_stress, run 
     20 
     21   .. automethod:: calc_stress(stress_func=SgnRelStress) 
     22   .. automethod:: run(iter, stress_func=SgnRelStress, eps=1e-3, progress_callback=None) 
    2023 
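
A minimal sketch of :obj:`run` on a hand-built distance matrix (assumes the bundled "iris" data set and :obj:`Orange.misc.SymMatrix`)::

    import Orange

    data = Orange.data.Table("iris")
    euclidean = Orange.distance.Euclidean(data)
    matrix = Orange.misc.SymMatrix(len(data))
    for i in range(len(data)):
        for j in range(i + 1):
            matrix[i, j] = euclidean(data[i], data[j])

    mds = Orange.projection.mds.MDS(matrix)
    mds.run(100)              # up to 100 optimization iterations
    print(mds.points[:3])     # projected coordinates of the first three instances
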
    2124Stress functions 
  • Orange/regression/base.py

    r10238 r10396  
    11"""\ 
    2 ==================================== 
    3 Basic regression learner (``basic``) 
    4 ==================================== 
     2======================= 
     3Base regression learner 
     4======================= 
    55 
    66.. index:: regression 
     
    1414 
    1515class BaseRegressionLearner(Orange.core.Learner): 
    16     """ Base Regression Learner "learns" how to treat the discrete 
    17         variables and missing data. 
     16    """Fitting regressors typically requires data that has only 
     17    continuous-valued features and no missing values. This class 
     18    provides methods for appropriate transformation of the data and 
     19    serves as a base class for most regressor classes. 
    1820    """ 
    1921 
     
    3537 
    3638    def set_imputer(self, imputer=None): 
    37         """ Sets the imputer for missing values. 
     39        """ Set the imputer for missing data. 
    3840 
    39         :param imputer: function which imputes the missing values, 
    40             if None, the default imputer: mean for the continuous variables 
    41             and most frequent value (majority) for discrete variables 
     41        :param imputer: function which constructs the imputer for the 
     42            missing values, if ``None``, the default imputer replaces 
     43            missing continuous data with the average of the 
     44            corresponding variable and missing discrete data with the 
     45            most frequent value. 
    4246        :type imputer: None or Orange.feature.imputation.ModelConstructor 
    4347        """ 
     
    5054 
    5155    def set_continuizer(self, continuizer=None): 
    52         """ Sets the continuizer of the discrete variables 
     56        """Set the continuizer of the discrete variables 
    5357 
    54         :param continuizer: function which replaces the categorical (dicrete) 
    55             variables with numerical variables. If None, the default continuizer 
    56             is used 
     58        :param continuizer: function which replaces the categorical 
      59            (discrete) variables with numerical variables. If ``None``, 
     60            the default continuizer is used 
    5761        :type continuizer: None or Orange.data.continuization.DomainContinuizer 
    5862        """ 
     
    6569 
    6670    def impute_table(self, table): 
    67         """ Imputes missing values. 
    68         Returns a new :class:`Orange.data.Table` object 
     71        """Impute missing values and return a new 
     72        :class:`Orange.data.Table` object 
    6973 
    7074        :param table: data instances. 
     
    7781 
    7882    def continuize_table(self, table): 
    79         """ Continuizes the discrete variables. 
    80         Returns a new :class:`Orange.data.Table` object 
     83        """Replace discrete variables with continuous and return a new 
     84        instance of :class:`Orange.data.Table`. 
    8185 
    8286        :param table: data instances. 
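
A minimal sketch of the helper methods documented above (assumes :obj:`BaseRegressionLearner` can be instantiated directly with the default imputer and continuizer; the "servo" data set has discrete features)::

    import Orange

    base = Orange.regression.base.BaseRegressionLearner()
    base.set_imputer()        # default: mean / most frequent value
    base.set_continuizer()    # default continuizer

    data = Orange.data.Table("servo")
    data = base.continuize_table(base.impute_table(data))
    print(data.domain)
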
  • Orange/regression/earth.py

    r10330 r10420  
    104104class EarthLearner(Orange.regression.base.BaseRegressionLearner): 
    105105    """Earth learner class. Supports both regression and classification 
    106     problems. In case of classification the class values are expanded into  
     106    problems. For classification, class values are expanded into  
    107107    continuous indicator columns (one for each value if the number of  
    108     values is grater then 2), and a multi response model is learned on these 
    109     new columns. The resulting classifier will then use the computed response 
      108    values is greater than 2), and a multi response model is fit to these 
      109    new columns. The resulting classifier then computes response 
    110110    values on new instances to select the final predicted class. 
    111111      
     
    126126         
    127127        :param degree: Maximum degree (num. of hinge functions per term) 
    128             of the terms in the model. 
     128            of the terms in the model (default: 1). 
    129129        :type degree: int 
    130         :param terms: Maximum number of terms in the forward pass (default 21). 
    131              
    132             .. note:: If this paramter is None then  
    133                 ``min(200, max(20, 2 * n_attributes)) + 1`` will be used. This 
    134                 is the same as the default setting in earth R package. 
    135                  
     130        :param terms: Maximum number of terms in the forward pass 
     131                (default: 21).  If set to ``None``, ``min(200, max(20, 2 
     132                * n_attributes)) + 1`` will be used, like the default 
     133                setting in earth R package. 
    136134        :type terms: int 
    137135        :param penalty: Penalty for hinges in the GCV computation (used  
    138             in the pruning pass). By default it is 3.0 if the degree > 1, 
    139             2.0 otherwise.  
     136            in the pruning pass). Default is 3.0 if ``degree`` is above 1, 
     137            and 2.0 otherwise.  
    140138        :type penalty: float 
    141139        :param thresh: Threshold for RSS decrease in the forward pass 
    142             (default 0.001). 
     140            (default: 0.001). 
    143141        :type thresh: float 
    144142        :param min_span: TODO. 
    145143        :param new_var_penalty: Penalty for introducing a new variable 
    146             in the model during the forward pass (default 0). 
     144            in the model during the forward pass (default: 0). 
    147145        :type new_var_penalty: float 
    148146        :param fast_k: Fast k. 
    149147        :param fast_beta: Fast beta. 
    150148        :param pruned_terms: Maximum number of terms in the model after 
    151             pruning (default None - no limit). 
     149            pruning (default: ``None``, no limit). 
    152150        :type pruned_terms: int 
    153         :param scale_resp: Scale responses prior to forward pass (default 
    154             True - ignored for multi response models). 
     151        :param scale_resp: Scale responses prior to forward pass (default: 
     152            ``True``); ignored for models with multiple responses. 
    155153        :type scale_resp: bool 
    156154        :param store_instances: Store training instances in the model 
    157             (default True). 
     155            (default: ``True``). 
    158156        :type store_instances: bool 
    159157          
     
    333331    def base_matrix(self, instances=None): 
    334332        """Return the base matrix (bx) of the Earth model for the table. 
    335         If table is not supplied the base matrix of the training instances  
     333        If table is not supplied, the base matrix of the training instances  
    336334        is returned. 
    337335        Base matrix is a len(instances) x num_terms matrix of computed values 
     
    350348     
    351349    def predict(self, instance): 
    352         """ Predict the response values for the instance 
     350        """ Predict the response value(s) 
    353351         
    354352        :param instance: Data instance 
     
    363361     
    364362    def used_attributes(self, term=None): 
    365         """ Return the used terms for term (index). If no term is given 
     363        """Return the used terms for term (index). If no term is given, 
    366364        return all attributes in the model. 
    367365         
     
    965963             
    966964class ScoreEarthImportance(scoring.Score): 
    967     """ An :class:`Orange.feature.scoring.Score` subclass. 
    968     Scores features based on their importance in the Earth 
    969     model using ``bagged_evimp``'s function return value. 
      965    """ A subclass of :class:`Orange.feature.scoring.Score` that 
     966    scores features based on their importance in the Earth 
     967    model using ``bagged_evimp``. 
    970968     
    971969    """ 
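
A minimal sketch following the parameters documented above (assumes the bundled "housing" data set; printing the model is expected to list the fitted terms)::

    import Orange

    data = Orange.data.Table("housing")
    earth = Orange.regression.earth.EarthLearner(degree=2, terms=10)
    model = earth(data)
    print(model)
    print(model(data[0]))   # predicted response for the first instance
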
  • Orange/regression/pls.py

    r10367 r10420  
    1010`Partial least squares 
    1111<http://en.wikipedia.org/wiki/Partial_least_squares_regression>`_ 
    12 regression is a statistical method which can be used to predict 
    13 multiple response variables simultaniously. Implementation is based on 
    14 `Scikit learn python implementation 
     12regression is a statistical method for simultaneous prediction of 
     13multiple response variables. Orange's implementation is 
     14based on `Scikit learn python implementation 
    1515<https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/pls.py>`_. 
    1616 
     
    3434 
    3535.. autofunction:: svd_xy 
    36  
    3736 
    3837======== 
     
    4039======== 
    4140 
    42 To predict values for the first two data instances 
    43 use the following code:  
     41The following code predicts the values of output variables for the 
     42first two instances in ``data``. 
     43 
    4444 
    4545.. literalinclude:: code/pls-example.py 
    4646    :lines: 16-20 
    4747 
    48 Output:: 
     48:: 
    4949 
    5050    Actual     [<orange.Value 'Y1'='0.490'>, <orange.Value 'Y2'='1.237'>, <orange.Value 'Y3'='1.808'>, <orange.Value 'Y4'='0.422'>] 
     
    5454    Predicted  [<orange.Value 'Y1'='0.058'>, <orange.Value 'Y2'='-0.706'>, <orange.Value 'Y3'='-1.420'>, <orange.Value 'Y4'='0.599'>] 
    5555 
    56 To see the coefficient of the model (in this case they are stored in a matrix) 
    57 print the model: 
      56To see the coefficients of the model, print the model: 
    5857 
    5958.. literalinclude:: code/pls-example.py 
    6059    :lines: 22 
    6160 
    62 The ouptut looks like this:: 
     61:: 
    6362 
    6463    Regression coefficients: 
     
    6867          X3        0.230       -0.314       -0.880       -0.060  
    6968 
    70  
     69Note that coefficients are stored in a matrix since the model predicts 
     70values of multiple outputs. 
    7171""" 
    7272 
     
    8383 
    8484def normalize_matrix(X): 
    85     """ Normalizes matrix, i.e. subtracts column means 
    86     and divides them by column standard deviations. 
    87     Returns the standardized matrix, sample mean and 
    88     standard deviation 
     85    """ 
     86    Normalize a matrix column-wise: subtract the means and divide by 
     87    standard deviations. Returns the standardized matrix, sample mean 
     88    and standard deviation 
    8989 
    9090    :param X: data matrix 
     
    9999def nipals_xy(X, Y, mode="PLS", max_iter=500, tol=1e-06): 
    100100    """ 
    101     NIPALS algorithm. Returns the first left and rigth singular 
      101    NIPALS algorithm; returns the first left and right singular 
    102102    vectors of X'Y. 
    103103 
     
    108108    :type mode: string 
    109109 
    110     :param max_iter: maximal number of iterations (default 500) 
     110    :param max_iter: maximal number of iterations (default: 500) 
    111111    :type max_iter: int 
    112112 
    113     :param tol: tolerance parameter, if norm of difference 
     113    :param tol: tolerance parameter; if norm of difference 
    114114        between two successive left singular vectors is less than tol, 
    115115        iteration is stopped 
     
    155155 
    156156def svd_xy(X, Y): 
    157     """ Returns the first left and right singular 
     157    """ Return the first left and right singular 
    158158    vectors of X'Y. 
    159159 
     
    169169 
    170170def select_attrs(table, attributes, class_var=None, metas=None): 
    171     """ Select only ``attributes`` from the ``table``. 
     171    """ Select ``attributes`` from the ``table`` and return a new data table. 
    172172    """ 
    173173    domain = Orange.data.Domain(attributes, class_var) 
     
    178178 
    179179class PLSRegressionLearner(base.BaseRegressionLearner): 
    180     """ Fits the partial least squares regression model, 
    181     i.e. learns the regression parameters. The implementation is based on 
    182     `Scikit learn python implementation`_ 
     180    """ 
     181    Fit the partial least squares regression model, i.e. learn the 
     182    regression parameters. The implementation is based on `Scikit 
     183    learn python implementation`_ 
    183184     
    184185    The class is derived from 
    185     :class:`Orange.regression.base.BaseRegressionLearner` 
    186     which is used for preprocessing the data (continuization and imputation) 
     186    :class:`Orange.regression.base.BaseRegressionLearner` that is 
     187    used for preprocessing the data (continuization and imputation) 
    187188    before fitting the regression parameters 
    188189     
     
    196197        .. attribute:: n_comp 
    197198     
    198             number of components to keep. Default: 2 
     199            number of components to keep (default: 2) 
    199200 
    200201        .. attribute:: deflation_mode 
     
    209210        .. attribute:: algorithm 
    210211     
    211             The algorithm used to estimate the weights: 
     212            The algorithm for estimating the weights: 
    212213            "nipals" or "svd" (default) 
    213214 
     
    231232        :param x_vars, y_vars: List of input and response variables 
    232233            (:obj:`Orange.feature.Continuous` or 
    233             :obj:`Orange.feature.Discrete`). If None (default) it is 
     234            :obj:`Orange.feature.Discrete`). If ``None`` (default) it is 
    234235            assumed that the data domain provides information which variables 
    235236            are reponses and which are not. If data has 
     
    281282 
    282283    def fit(self, X, Y): 
    283         """ Fits all unknown parameters, i.e. 
     284        """ Fit all unknown parameters, i.e. 
    284285        weights, scores, loadings (for x and y) and regression coefficients. 
    285         Returns a dict with all of the parameters. 
    286          
     286        Return a dict with all of the parameters. 
    287287        """ 
    288288        # copy since this will contain the residuals (deflated) matrices 
     
    365365 
    366366class PLSRegression(Orange.classification.Classifier): 
    367     """ PLSRegression predicts value of the response variables 
     367    """ Predict values of the response variables 
    368368    based on the values of independent variables. 
    369369     
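
A minimal sketch of fitting and applying the model (assumes a data table whose domain declares several continuous response variables, e.g. the "multitarget-synthetic" data set)::

    import Orange

    data = Orange.data.Table("multitarget-synthetic")
    learner = Orange.regression.pls.PLSRegressionLearner()
    classifier = learner(data)
    print(classifier(data[0]))   # one predicted value per response variable
    print(classifier)            # prints the matrix of regression coefficients
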
  • Orange/regression/tree.py

    r10294 r10395  
    99*************************** 
    1010 
    11 Regression tree shares its implementation with Orange.classification.tree.TreeLearner, 
    12 but uses a different set of functions to evaluate node splitting and stop 
    13 criteria. Usage of regression trees is straightforward as demonstrated on the 
    14 following example (:download:`regression-tree-run.py <code/regression-tree-run.py>`): 
     11Regression tree shares its implementation with 
     12:obj:`Orange.classification.tree.TreeLearner`, but uses a different set of 
     13functions to evaluate node splitting and stop criteria. Usage of 
     14regression trees is straightforward as demonstrated on the following 
     15example (:download:`regression-tree-run.py 
     16<code/regression-tree-run.py>`): 
    1517 
    1618.. literalinclude:: code/regression-tree-run.py 
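
For reference, a minimal sketch along the lines of that example (the "servo" data set and the :obj:`to_string` method are assumptions borrowed from the classification tree documentation)::

    import Orange

    data = Orange.data.Table("servo")
    learner = Orange.regression.tree.TreeLearner(max_depth=4)
    tree = learner(data)
    print(tree.to_string())
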
  • Orange/statistics/basic.py

    r9994 r10399  
    1 """ 
    2 .. index:: Basic Statistics for Continuous Features 
    3  
    4 ======================================== 
    5 Basic Statistics for Continuous Features 
    6 ======================================== 
    7  
    8 The are two simple classes for computing basic statistics 
    9 for continuous features, such as their minimal and maximal value 
    10 or average: :class:`Orange.statistics.basic.Variable` holds the statistics for a single variable 
    11 and :class:`Orange.statistics.basic.Domain` behaves like a list of instances of 
    12 the above class for all variables in the domain. 
    13  
    14 .. class:: Variable 
    15  
    16     Computes and stores minimal, maximal, average and 
    17     standard deviation of a variable. It does not include the median or any 
    18     other statistics that can be computed on the fly, without remembering the 
    19     data; such statistics can be obtained classes from module :obj:`Orange.statistics.distribution`. 
    20  
    21     Instances of this class are seldom constructed manually; they are more often 
    22     returned by :obj:`Domain` described below. 
    23  
    24     .. attribute:: variable 
    25      
    26         The variable to which the data applies. 
    27  
    28     .. attribute:: min 
    29  
    30         Minimal value encountered 
    31  
    32     .. attribute:: max 
    33  
    34         Maximal value encountered 
    35  
    36     .. attribute:: avg 
    37  
    38         Average value 
    39  
    40     .. attribute:: dev 
    41  
    42         Standard deviation 
    43  
    44     .. attribute:: n 
    45  
    46         Number of instances for which the value was defined. 
    47         If instances were weighted, :obj:`n` holds the sum of weights 
    48          
    49     .. attribute:: sum 
    50  
    51         Weighted sum of values 
    52  
    53     .. attribute:: sum2 
    54  
    55         Weighted sum of squared values 
    56  
    57     .. 
    58         .. attribute:: holdRecomputation 
    59      
    60             Holds recomputation of the average and standard deviation. 
    61  
    62     .. method:: add(value[, weight=1]) 
    63      
    64         Add a value to the statistics: adjust :obj:`min` and :obj:`max` if 
    65         necessary, increase :obj:`n` and recompute :obj:`sum`, :obj:`sum2`, 
    66         :obj:`avg` and :obj:`dev`. 
    67  
    68         :param value: Value to be added to the statistics 
    69         :type value: float 
    70         :param weight: Weight assigned to the value 
    71         :type weight: float 
    72  
    73     .. 
    74         .. method:: recompute() 
    75  
    76             Recompute the average and deviation. 
    77  
    78 .. class:: Domain 
    79  
    80     ``statistics.basic.Domain`` behaves like an ordinary list, except that its 
    81     elements can also be indexed by variable names or descriptors. 
    82  
    83     .. method:: __init__(data[, weight=None]) 
    84  
    85         Compute the statistics for all continuous variables in the data, and put 
    86         :obj:`None` to the places corresponding to variables of other types. 
    87  
    88         :param data: A table of instances 
    89         :type data: Orange.data.Table 
    90         :param weight: The id of the meta-attribute with weights 
    91         :type weight: `int` or none 
    92          
    93     .. method:: purge() 
    94      
    95         Remove the :obj:`None`'s corresponding to non-continuous features; this 
    96         truncates the list, so the indices do not respond to indices of 
    97         variables in the domain. 
    98      
    99     part of :download:`distributions-basic-stat.py <code/distributions-basic-stat.py>` 
    100      
    101     .. literalinclude:: code/distributions-basic-stat.py 
    102         :lines: 1-10 
    103  
    104     Output:: 
    105  
    106              feature   min   max   avg 
    107         sepal length 4.300 7.900 5.843 
    108          sepal width 2.000 4.400 3.054 
    109         petal length 1.000 6.900 3.759 
    110          petal width 0.100 2.500 1.199 
    111  
    112  
    113     part of :download:`distributions-basic-stat.py <code/distributions-basic-stat.py>` 
    114      
    115     .. literalinclude:: code/distributions-basic-stat.py 
    116         :lines: 11- 
    117  
    118     Output:: 
    119  
    120         5.84333467484  
    121  
    122 """ 
    123  
    1241from Orange.core import BasicAttrStat as Variable 
    1252from Orange.core import DomainBasicAttrStat as Domain 
  • Orange/testing/unit/tests/test_tree.py

    r10319 r10412  
    1919    LEARNER = rtree.TreeLearner(max_depth=50) 
    2020 
     21 
     22@datasets_driven(datasets=testing.CLASSIFICATION_DATASETS) 
     23class TestSTLClassification(testing.LearnerTestCase): 
     24    LEARNER = ctree.SimpleTreeLearner(max_depth=50) 
     25 
     26 
     27@datasets_driven(datasets=testing.REGRESSION_DATASETS) 
     28class TestSTLRegression(testing.LearnerTestCase): 
     29    LEARNER = rtree.SimpleTreeLearner(max_depth=50) 
     30 
     31    def test_learner_on(self): 
      32        # Does not pass unit tests because it returns None for the distribution. 
     33        # I do not plan on implementing this as it will only add unnecessary overhead. 
     34        pass 
     35 
     36 
    2137if __name__ == "__main__": 
    2238    unittest.main() 
  • docs/reference/rst/Orange.clustering.rst

    r10166 r10421  
    33########################### 
    44 
    5 .. automodule:: Orange.clustering 
     5.. py:currentmodule:: Orange.clustering 
     6 
     7.. index:: clustering 
     8 
      9Orange offers three clustering methods: k-means clustering, hierarchical clustering with different link functions, and consensus clustering, which is still very much under development. 
    610 
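A minimal sketch of the first of these, assuming the k-means entry point is ``Orange.clustering.kmeans.Clustering`` with a ``centroids`` argument::

    import Orange

    iris = Orange.data.Table("iris")
    km = Orange.clustering.kmeans.Clustering(iris, centroids=3)
    print km.clusters[:10]        # cluster index assigned to each instance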
    711.. toctree:: 
  • docs/reference/rst/Orange.data.discretization.rst

    r10050 r10393  
    6969.. .. autoclass:: Orange.feature.discretization.DiscretizedLearner_Class 
    7070 
    71 .. autoclass:: DiscretizeTable 
     71.. autoclass:: DiscretizeTable(features=None, discretize_class=False, method=EqualFreq(n=3), clean=True) 
    7272 
    7373.. A chapter on `feature subset selection <../ofb/o_fss.htm>`_ in Orange 
  • docs/reference/rst/Orange.evaluation.reliability.rst

    r9683 r10393  
    6666------------------------------------ 
    6767 
    68 .. autoclass:: BaggingVarianceCNeighbours 
     68.. autoclass:: BaggingVarianceCNeighbours(bagv=BaggingVariance(), cnk=CNeighbours()) 
    6969 
    7070Mahalanobis distance 
     
    8181=============================== 
    8282 
    83 .. autoclass:: Learner 
     83.. autoclass:: Learner(box_learner, name="Reliability estimation", estimators=[SensitivityAnalysis(), LocalCrossValidation(), BaggingVarianceCNeighbours(), Mahalanobis(), MahalanobisToCenter()], **kwds) 
    8484    :members: 
    8585 
  • docs/reference/rst/Orange.evaluation.rst

    r9372 r10414  
    33############################# 
    44 
      5Evaluation of prediction models is split into two parts. Module 
     6:obj:`Orange.evaluation.testing` contains procedures that sample data, 
     7train learning algorithms and test models. All procedures return 
     8results as an instance of 
     9:obj:`~Orange.evaluation.testing.ExperimentResults` that is described 
     10below. Module :obj:`Orange.evaluation.scoring` uses such data to 
     11compute various performance scores like classification accuracy and 
     12AUC. 
     13 
      14There is a third module, :obj:`Orange.evaluation.reliability`, which is 
      15unrelated to this scheme and assesses the reliability of individual 
      16predictions. 
     17 
    518.. toctree:: 
    619   :maxdepth: 1 
    720 
     21   Orange.evaluation.testing 
    822   Orange.evaluation.scoring 
    9    Orange.evaluation.testing 
    1023   Orange.evaluation.reliability 
    1124 
     25Classes for storing the experimental results 
     26-------------------------------------------- 
     27 
     28 
      29The following two classes are used for storing the results of experiments by :obj:`Orange.evaluation.testing` and for computing scores by :obj:`Orange.evaluation.scoring`. Instances of these classes seldom need to be constructed or used outside of these two modules. 
     30 
     31.. py:currentmodule:: Orange.evaluation.testing 
     32 
     33.. autoclass:: ExperimentResults(iterations, classifier_names, class_values=None, weights=None, base_class=-1) 
     34    :members: 
     35 
     36.. autoclass:: TestedExample 
     37    :members: 
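For readers writing their own scores, the stored results can be traversed directly; a sketch assuming the usual ``results``, ``classes`` and ``actual_class`` attributes::

    import Orange

    iris = Orange.data.Table("iris")
    bayes = Orange.classification.bayes.NaiveLearner()
    res = Orange.evaluation.testing.cross_validation([bayes], iris, folds=5)

    # one TestedExample per instance; classes[i] is the prediction of learner i
    correct = sum(tex.classes[0] == tex.actual_class for tex in res.results)
    print "manual CA: %.3f" % (float(correct) / len(res.results))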
  • docs/reference/rst/Orange.evaluation.testing.rst

    r10339 r10414  
    66================================== 
    77 
    8 There are many ways to test prediction models on data. Orange includes 
    9 methods for cross-validation, leave-one out, random sampling and learning 
    10 curves. This methods handle learning of models and prediction of new 
    11 examples; they return :obj:`ExperimentResults` which can be passed to 
    12 :obj:`~Orange.evaluation.scoring` functions to evaluate model. 
     8Module :obj:`Orange.evaluation.testing` contains methods for 
      9cross-validation, leave-one-out, random sampling and learning 
      10curves. These procedures split the data into training and testing sets 
     11and use the training data to induce models; models then make 
     12predictions for testing data. Predictions are collected in 
     13:obj:`ExperimentResults`, together with the actual classes and some 
      14other data. These results can be passed to functions from 
      15:obj:`~Orange.evaluation.scoring` that compute the performance scores 
     16of models. 
    1317 
    1418.. literalinclude:: code/testing-example.py 
     19 
      20The following call makes 100 iterations of a 70:30 test and stores all the 
     21induced classifiers. :: 
     22 
     23     res = Orange.evaluation.testing.proportion_test(learners, iris, 0.7, 100, store_classifiers=1) 
    1524 
    1625Different evaluation techniques are implemented as instance methods of 
    1726:obj:`Evaluation` class. For ease of use, an instance of this class is 
    1827created at module loading time and instance methods are exposed as functions 
    19 with the same name in Orange.evaluation.testing namespace. 
     28in :obj:`Orange.evaluation.testing`. 
     29 
     30Randomness in tests 
     31=================== 
     32 
      33If an evaluation method uses random sampling, the parameter 
      34``random_generator`` can be used to provide either a random 
      35seed or an instance of :obj:`~Orange.misc.Random`. If omitted, a new 
      36random generator with seed 0 is constructed for each call of the 
      37method. 
     38 
     39.. note:: 
     40 
     41    Running the same script twice will generally give the same 
     42    results. 
     43 
      44For conducting a repeatable set of experiments, construct an instance 
      45of :obj:`~Orange.misc.Random` and pass it to all methods. This way, 
      46all methods will use different random numbers, but these numbers will 
      47be the same in each run of the script. 
     48 
      49For truly random numbers, set the seed to a number generated with 
      50Python's random generator. Since Python's generator is seeded with the 
      51current system time each time Python is loaded, the results of the 
      52script will be different each time you run it. 
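A sketch of a repeatable pair of experiments, assuming both methods accept the ``random_generator`` argument described above::

    import Orange

    iris = Orange.data.Table("iris")
    learners = [Orange.classification.bayes.NaiveLearner()]
    rg = Orange.misc.Random(42)            # one generator shared by all calls

    res_cv = Orange.evaluation.testing.cross_validation(
        learners, iris, folds=5, random_generator=rg)
    res_pt = Orange.evaluation.testing.proportion_test(
        learners, iris, 0.7, 10, random_generator=rg)

Running the script again reproduces the same samples and hence the same results.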
    2053 
    2154.. autoclass:: Evaluation 
    22    :members: 
    2355 
    24 .. autoclass:: ExperimentResults 
    25     :members: 
     56   .. automethod:: cross_validation 
    2657 
    27 .. autoclass:: TestedExample 
    28     :members: 
     58   .. automethod:: leave_one_out 
    2959 
    30 Generating random numbers 
    31 ========================= 
     60   .. automethod:: proportion_test 
    3261 
    33 Many evaluation 
     62   .. automethod:: test_with_indices 
    3463 
    35 *stratified* 
    36     Tells whether to stratify the random selections. Its default value is 
    37     :obj:`orange.StratifiedIfPossible` which stratifies selections 
    38     if the class variable is discrete and has no unknown values. 
     64   .. automethod:: one_fold_with_indices 
    3965 
    40 *random_generator* 
    41     If evaluation method relies on randomness, parameter ``random_generator`` 
    42     can be used to either provide a random seed or an instance of 
    43     :obj:`~Orange.misc.Random` which will be used to generate random numbers. 
     66   .. automethod:: learn_and_test_on_learn_data 
    4467 
    45     By default, a new instance of random generator is constructed for each 
    46     call of the method with random seed 0. 
     68   .. automethod:: learn_and_test_on_test_data 
    4769 
    48     If you use more than one method that is based on randomness, 
    49     you can construct an instance of :obj:`~Orange.misc.Random` and pass it 
    50     to all of them. This way, all methods will use different random numbers, 
    51     but this numbers will be the same for each run of the script. 
      70   .. automethod:: learning_curve(learners, examples, cv_indices=None, proportion_indices=None, proportions=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0], preprocessors=(), random_generator=0, callback=None) 
    5271 
    53     For truly random number, set seed to a random number generated with 
    54     python random generator. Since python's random generator is reset each 
    55     time python is loaded with current system time as seed, 
    56     results of the script will be different each time you run it. 
     72   .. automethod:: learning_curve_n(learners, examples, folds=10, proportions=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0], stratification=StratifiedIfPossible, preprocessors=(), random_generator=0, callback=None) 
    5773 
    58 *preprocessors* 
    59     A list of preprocessors. It consists of tuples ``(c, preprocessor)``, 
    60     where ``c`` determines whether the preprocessor will be applied 
    61     to the learning set (``"L"``), test set (``"T"``) or to both 
    62     (``"B"``). The latter is applied first, when the example set is still 
    63     undivided. The ``"L"`` and ``"T"`` preprocessors are applied on the 
    64     separated subsets. Preprocessing testing examples is allowed only 
    65     on experimental procedures that do not report the TestedExample's 
    66     in the same order as examples in the original set. The second item 
    67     in the tuple, preprocessor can be either a pure Orange or a pure 
    68     Python preprocessor, that is, any function or callable class that 
    69     accepts a table of examples and weight, and returns a preprocessed 
    70     table and weight. 
     74   .. automethod:: learning_curve_with_test_data(learners, learn_set, test_set, times=10, proportions=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0], stratification=StratifiedIfPossible, preprocessors=(), random_generator=0, store_classifiers=False, store_examples=False) 
    7175 
    72     This example will demonstrate the devastating effect of 100% class 
    73     noise on learning. :: 
     76   .. automethod:: test_on_data 
    7477 
    75         classnoise = orange.Preprocessor_addClassNoise(proportion=1.0) 
    76         res = Orange.evaluation.testing.proportion_test(learners, data, 0.7, 100, pps = [("L", classnoise)]) 
    77  
    78 *store_classifiers (keyword argument)* 
    79     If this flag is set, the testing procedure will store the constructed 
    80     classifiers. For each iteration of the test (eg for each fold in 
    81     cross validation, for each left out example in leave-one-out...), 
    82     the list of classifiers is appended to the ExperimentResults' 
    83     field classifiers. 
    84  
    85     The script below makes 100 repetitions of 70:30 test and store the 
    86     classifiers it induces. :: 
    87  
    88         res = Orange.evaluation.testing.proportion_test(learners, data, 0.7, 
    89         100, store_classifiers=1) 
    90  
    91  
    92 Knowing classes :obj:`TestedExample` that stores results of testing 
    93 for a single test example and :obj:`ExperimentResults` that stores a list of 
    94 TestedExamples along with some other data on experimental procedures 
    95 and classifiers used, is important if you would like to write your own 
    96 measures of quality of models, compatible the sampling infrastructure 
    97 provided by Orange. If not, you can skip the remainder of this page. 
    98  
    99  
    100  
    101 References 
    102 ========== 
    103  
    104 Salzberg, S. L. (1997). On comparing classifiers: Pitfalls to avoid 
    105 and a recommended approach. Data Mining and Knowledge Discovery 1, 
    106 pages 317-328. 
    107  
  • docs/reference/rst/Orange.feature.scoring.rst

    r10170 r10393  
    422422   :members: 
    423423 
    424 .. autofunction:: Orange.feature.scoring.score_all 
     424.. autofunction:: Orange.feature.scoring.score_all(data, score=Relief(k=20, m=50)) 
    425425 
    426426.. rubric:: Bibliography 
  • docs/reference/rst/Orange.multilabel.rst

    r9928 r10417  
    66.org/wiki/Multi-label_classification>`_ is a machine learning prediction 
    77problem in which multiple binary variables (i.e. labels) are being predicted. 
    8 Orange supports such a task, although the set of available methods is 
    9 currently rather limited. 
     8Orange offers a limited number of methods for this task. 
    109 
    1110Multi-label data is represented as :ref:`multi-target data <multiple-classes>` 
     
    1413using :ref:`multiclass directive <tab-delimited>`. 
    1514 
    16 .. automodule:: Orange.multilabel 
    17  
    18 .. toctree:: 
    19    :maxdepth: 1 
    20  
    21    Orange.multilabel.br 
    22    Orange.multilabel.lp 
    23    Orange.multilabel.multiknn 
    24    Orange.multilabel.mlknn 
    25    Orange.multilabel.brknn 
     15.. automodule:: Orange.multilabel.br 
     16.. automodule:: Orange.multilabel.lp 
     17.. automodule:: Orange.multilabel.multiknn 
     18.. automodule:: Orange.multilabel.mlknn 
     19.. automodule:: Orange.multilabel.brknn 
  • docs/reference/rst/Orange.regression.mean.rst

    r10388 r10393  
    88 
    99Accuracy of a regressor is often compared with the accuracy achieved 
    10 by always predicting the averag value. The "learning algorithm" 
     10by always predicting the average value. The "learning algorithm" 
    1111computes the average and represents it with a regressor of type 
    1212:obj:`Orange.classification.ConstantClassifier`. 
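A sketch of this baseline; the learner name ``Orange.regression.mean.MeanLearner`` is an assumption::

    import Orange

    housing = Orange.data.Table("housing")
    mean = Orange.regression.mean.MeanLearner(housing)   # constant predictor
    print mean(housing[0])                               # always the average value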
  • docs/reference/rst/Orange.regression.rst

    r9372 r10396  
    33########################### 
    44 
      5Orange uses the term `classification` also for regression. For 
      6instance, the dependent variable is called a `class variable` even 
      7when it is continuous, and models are generally called classifiers. 
      8Part of the reason is that classification and regression rely on the 
      9same set of basic classes. 
     10 
     11Please see the documentation on :doc:`Orange.classification` for 
     12information on how to fit models in general. 
     13 
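In short, fitting a regression model follows the same pattern as fitting a classifier; a sketch using the linear model (the learner name ``LinearRegressionLearner`` is assumed here)::

    import Orange

    housing = Orange.data.Table("housing")
    learner = Orange.regression.linear.LinearRegressionLearner()
    model = learner(housing)
    print model(housing[0])     # predicted value of the (continuous) class variable
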
     14Orange contains a number of regression models which are listed below. 
     15 
    516.. toctree:: 
    6    :maxdepth: 4 
     17   :maxdepth: 1 
    718 
    819   Orange.regression.mean 
    9    Orange.regression.base 
    1020   Orange.regression.linear 
    1121   Orange.regression.lasso 
     
    1424   Orange.regression.tree 
    1525 
     26.. automodule:: Orange.regression.base 
  • docs/reference/rst/Orange.statistics.basic.rst

    r9372 r10397  
    1 .. automodule:: Orange.statistics.basic 
     1.. py:currentmodule:: Orange.statistics.basic 
     2 
     3.. index:: Basic Statistics for Continuous Features 
     4 
     5==================================================== 
     6Basic Statistics for Continuous Features (``basic``) 
     7==================================================== 
     8 
      9There are two simple classes for computing basic statistics 
     10for continuous features, such as their minimal and maximal value 
     11or average: :class:`Orange.statistics.basic.Variable` holds the statistics for a single variable 
     12and :class:`Orange.statistics.basic.Domain` behaves like a list of instances of 
     13the above class for all variables in the domain. 
     14 
     15.. class:: Variable 
     16 
     17    Computes and stores minimal, maximal, average and 
     18    standard deviation of a variable. It does not include the median or any 
     19    other statistics that can be computed on the fly, without remembering the 
      20    data; such statistics can be obtained from classes in module :obj:`Orange.statistics.distribution`. 
     21 
     22    Instances of this class are seldom constructed manually; they are more often 
     23    returned by :obj:`Domain` described below. 
     24 
     25    .. attribute:: variable 
     26     
     27        The variable to which the data applies. 
     28 
     29    .. attribute:: min 
     30 
     31        Minimal value encountered 
     32 
     33    .. attribute:: max 
     34 
     35        Maximal value encountered 
     36 
     37    .. attribute:: avg 
     38 
     39        Average value 
     40 
     41    .. attribute:: dev 
     42 
     43        Standard deviation 
     44 
     45    .. attribute:: n 
     46 
     47        Number of instances for which the value was defined. 
     48        If instances were weighted, :obj:`n` holds the sum of weights 
     49         
     50    .. attribute:: sum 
     51 
     52        Weighted sum of values 
     53 
     54    .. attribute:: sum2 
     55 
     56        Weighted sum of squared values 
     57 
     58    .. 
     59        .. attribute:: holdRecomputation 
     60     
     61            Holds recomputation of the average and standard deviation. 
     62 
     63    .. method:: add(value[, weight=1]) 
     64     
     65        Add a value to the statistics: adjust :obj:`min` and :obj:`max` if 
     66        necessary, increase :obj:`n` and recompute :obj:`sum`, :obj:`sum2`, 
     67        :obj:`avg` and :obj:`dev`. 
     68 
     69        :param value: Value to be added to the statistics 
     70        :type value: float 
     71        :param weight: Weight assigned to the value 
     72        :type weight: float 
     73 
     74    .. 
     75        .. method:: recompute() 
     76 
     77            Recompute the average and deviation. 
     78 
     79.. class:: Domain 
     80 
     81    ``statistics.basic.Domain`` behaves like an ordinary list, except that its 
     82    elements can also be indexed by variable names or descriptors. 
     83 
     84    .. method:: __init__(data[, weight=None]) 
     85 
     86        Compute the statistics for all continuous variables in the data, and put 
     87        :obj:`None` to the places corresponding to variables of other types. 
     88 
     89        :param data: A table of instances 
     90        :type data: Orange.data.Table 
     91        :param weight: The id of the meta-attribute with weights 
      92        :type weight: `int` or ``None`` 
     93         
     94    .. method:: purge() 
     95     
     96        Remove the :obj:`None`'s corresponding to non-continuous features; this 
      97        truncates the list, so its indices no longer correspond to indices of 
     98        variables in the domain. 
     99     
     100    part of :download:`distributions-basic-stat.py <code/distributions-basic-stat.py>` 
     101     
     102    .. literalinclude:: code/distributions-basic-stat.py 
     103        :lines: 1-10 
     104 
     105    Output:: 
     106 
     107             feature   min   max   avg 
     108        sepal length 4.300 7.900 5.843 
     109         sepal width 2.000 4.400 3.054 
     110        petal length 1.000 6.900 3.759 
     111         petal width 0.100 2.500 1.199 
     112 
     113 
     114    part of :download:`distributions-basic-stat.py <code/distributions-basic-stat.py>` 
     115     
     116    .. literalinclude:: code/distributions-basic-stat.py 
     117        :lines: 11- 
     118 
     119    Output:: 
     120 
     121        5.84333467484  
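The first snippet above boils down to roughly the following; only attributes documented on this page are used, and the exact output formatting is an assumption::

    import Orange

    iris = Orange.data.Table("iris")
    stats = Orange.statistics.basic.Domain(iris)

    for stat in stats:
        if stat:                                  # None for non-continuous features
            print "%15s %5.3f %5.3f %5.3f" % (
                stat.variable.name, stat.min, stat.max, stat.avg)
    print stats["sepal length"].avg               # indexing by variable name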
  • docs/reference/rst/Orange.statistics.contingency.rst

    r10246 r10397  
    1 .. py:currentmodule::Orange.statistics.contingency 
     1.. py:currentmodule:: Orange.statistics.contingency 
    22 
    33.. index:: Contingency table 
    44 
    5 ================= 
    6 Contingency table 
    7 ================= 
     5=================================== 
     6Contingency table (``contingency``) 
     7=================================== 
    88 
    99Contingency table contains conditional distributions. Unless explicitly 
  • docs/reference/rst/Orange.statistics.distribution.rst

    r10372 r10397  
    33.. index:: Distributions 
    44 
    5 ============= 
    6 Distributions 
    7 ============= 
     5================================ 
     6Distributions (``distribution``) 
     7================================ 
    88 
    99:obj:`Distribution` and derived classes store empirical 
  • docs/widgets/rst/conf.py

    r9388 r10405  
    2323# Add any Sphinx extension module names here, as strings. They can be extensions 
    2424# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
    25 extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.pngmath'] 
     25extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 
     26              'sphinx.ext.pngmath'] 
    2627 
    2728# Add any paths that contain templates here, relative to this directory. 
     
    167168# relative to this directory. They are copied after the builtin static files, 
    168169# so a file named "default.css" will overwrite the builtin "default.css". 
    169 html_static_path = [] 
     170 
     171# The old widgets documentation is copied here 
     172html_static_path = ["../../../Orange/doc/widgets"] 
    170173 
    171174# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 
     
    289292  
    290293class StampListDecorate(Transform): 
     294    """Decorate a list with pixmap bullet points. 
     295     
     296    Example:: 
     297     
     298        .. rst-class: stamp-list 
     299         
     300            1. First 
     301            2. Second 
     302         
     303         
     304    """ 
    291305    default_priority = 1000 
    292306    def apply(self): 
  • docs/widgets/rst/index.rst

    r9387 r10419  
    33############## 
    44 
    5 Contents: 
     5 
     6Data 
     7---- 
     8 
     9.. The first row shows how to insert rst documentation, the rest  
     10   link to the old documentation that is copied to '_static'  
     11 
     12.. list-table:: 
     13    :class: widget-catalog-table 
     14    :widths: 1 1 1 
     15     
     16    * - |File_icon| :ref:`File` 
     17      - |DataTable_icon| :ref:`Data Table` 
     18      - |SelectAttributes_icon| :ref:`Select Attributes` 
     19    * - |Rank_icon| `Rank`_ 
     20      - |PurgeDomain_icon| `Purge Domain`_ 
     21      - |MergeData_icon| `Merge Data`_ 
     22    * - |Concat_icon| `Concatenate`_ 
     23      - |DataSampler_icon| `Data Sampler`_ 
     24      - |SelectData_icon| `Select Data`_ 
     25    * - |Save_icon| `Save`_ 
     26      - |Discretize_icon| `Discretize`_ 
     27      - |Cont_icon| `Continuize`_ 
     28    * - |Impute_icon| `Impute`_ 
     29      - |Outliers_icon| `Outliers`_ 
     30      - 
     31 
     32.. _`Rank`: _static/Data/Rank.htm 
     33 
     34.. _`Purge Domain`: _static/Data/PurgeDomain.htm 
     35 
     36.. _`Merge Data`: _static/Data/MergeData.htm 
     37 
     38.. _`Concatenate`: _static/Data/Concatenate.htm 
     39 
     40.. _`Data Sampler`: _static/Data/DataSampler.htm 
     41 
     42.. _`Select Data`: _static/Data/SelectData.htm 
     43 
     44.. _`Save`: _static/Data/Save.htm 
     45 
     46.. _`Discretize`: _static/Data/Discretize.htm 
     47 
     48.. _`Continuize`: _static/Data/Continuize.htm 
     49 
     50.. _`Impute`: _static/Data/Impute.htm 
     51 
     52.. _`Outliers`: _static/Data/Outliers.htm 
     53 
     54 
     55.. |File_icon| image:: data/images/File_icon.png 
     56    :align: middle 
     57    :alt: File 
     58    :width: 48 
     59    :height: 48 
     60 
     61.. |DataTable_icon| image:: data/images/DataTable_icon.png 
     62    :align: middle 
     63    :alt: Data Table 
     64    :width: 48 
     65    :height: 48 
     66     
     67.. |SelectAttributes_icon| image:: data/images/SelectAttributes_icon.png 
     68    :align: middle 
     69    :alt: Select Attributes 
     70    :width: 48 
     71    :height: 48 
     72     
     73.. |Rank_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/Rank_48.png 
     74    :align: middle 
     75    :alt: Rank 
     76    :width: 48 
     77    :height: 48 
     78     
     79.. |PurgeDomain_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/PurgeDomain_48.png 
     80    :align: middle 
     81    :alt: Purge Domain 
     82    :width: 48 
     83    :height: 48 
     84     
     85.. |MergeData_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/MergeData_48.png 
     86    :align: middle 
     87    :alt: Merge Data 
     88    :width: 48 
     89    :height: 48 
     90 
     91.. |Concat_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/Concatenate_48.png 
     92    :align: middle 
      93    :alt: Concatenate 
     94    :width: 48 
     95    :height: 48 
     96 
     97.. |DataSampler_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/DataSampler_48.png 
     98    :align: middle 
      99    :alt: Data Sampler 
     100    :width: 48 
     101    :height: 48 
     102 
     103.. |SelectData_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/SelectData_48.png 
     104    :align: middle 
      105    :alt: Select Data 
     106    :width: 48 
     107    :height: 48 
     108 
     109.. |Save_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/Save_48.png 
     110    :align: middle 
      111    :alt: Save 
     112    :width: 48 
     113    :height: 48 
     114 
     115.. |Discretize_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/Discretize_48.png 
     116    :align: middle 
      117    :alt: Discretize 
     118    :width: 48 
     119    :height: 48 
     120 
     121.. |Cont_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/Continuize_48.png 
     122    :align: middle 
      123    :alt: Continuize 
     124    :width: 48 
     125    :height: 48 
     126     
     127.. |Impute_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/Impute_48.png 
     128    :align: middle 
      129    :alt: Impute 
     130    :width: 48 
     131    :height: 48 
     132     
     133.. |Outliers_icon| image:: ../../../Orange/OrangeWidgets/Data/icons/Outliers_48.png 
     134    :align: middle 
      135    :alt: Outliers 
     136    :width: 48 
     137    :height: 48 
     138 
     139 
     140Visualize 
     141--------- 
     142 
     143.. list-table:: 
     144    :class: widget-catalog-table 
     145    :widths: 1 1 1 
     146     
     147    * - |Distributions_icon| `Distributions`_ 
     148      - |ScatterPlot_icon| `Scatter Plot`_ 
     149      - |AttributeStatistics_icon| `Attribute Statistics`_ 
     150    * - |LinearProjection_icon| `Linear Projection`_ 
     151      - |Radviz_icon| `Radviz`_ 
     152      - |Polyviz_icon| `Polyviz`_ 
     153    * - |ParallelCoords_icon| `Parallel Coordinates`_ 
     154      - |SurveyPlot_icon| `Survey Plot`_ 
     155      - |MosaicDisplay_icon| `Mosaic Display`_ 
     156    * - |SieveDiagram_icon| `Sieve Diagram`_ 
     157      - |SieveMultigram_icon| Sieve Multigram  
     158      - 
     159     
     160.. _`Distributions`: _static/Visualize/Distributions.htm 
     161 
     162.. _`Scatter Plot`: _static/Visualize/Scatterplot.htm 
     163 
     164.. _`Attribute Statistics`: _static/Visualize/AttributeStatistics.htm 
     165 
     166.. _`Linear Projection`: _static/Visualize/LinearProjection.htm 
     167 
     168.. _`Radviz`: _static/Visualize/Radviz.htm 
     169 
     170.. _`Polyviz`: _static/Visualize/Polyviz.htm 
     171 
     172.. _`Parallel Coordinates`: _static/Visualize/ParallelCoordinates.htm 
     173 
     174.. _`Survey Plot`: _static/Visualize/SurveyPlot.htm 
     175 
     176.. _`Mosaic Display`: _static/Visualize/MosaicDisplay.htm 
     177 
     178.. _`Sieve Diagram`: _static/Visualize/SieveDiagram.htm 
     179 
     180 
     181.. |Distributions_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/Distribution_48.png 
     182    :align: middle 
     183    :alt: Distributions 
     184    :width: 48 
     185    :height: 48 
     186     
     187.. |ScatterPlot_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/ScatterPlot_48.png 
     188    :align: middle 
     189    :alt: Scatter Plot 
     190    :width: 48 
     191    :height: 48 
     192     
     193.. |AttributeStatistics_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/AttributeStatistics_48.png 
     194    :align: middle 
     195    :alt: AttributeStatistics 
     196    :width: 48 
     197    :height: 48 
     198     
     199.. |LinearProjection_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/LinearProjection_48.png 
     200    :align: middle 
     201    :alt: Linear Projection 
     202    :width: 48 
     203    :height: 48 
     204     
     205.. |Radviz_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/Radviz_48.png 
     206    :align: middle 
     207    :alt: Rad Viz 
     208    :width: 48 
     209    :height: 48 
     210     
     211.. |Polyviz_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/Polyviz_48.png 
     212    :align: middle 
     213    :alt: Poly Viz 
     214    :width: 48 
     215    :height: 48 
     216     
     217.. |ParallelCoords_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/ParallelCoordinates_48.png 
     218    :align: middle 
     219    :alt: Parallel Coordinates 
     220    :width: 48 
     221    :height: 48 
     222     
     223.. |SurveyPlot_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/SurveyPlot_48.png 
     224    :align: middle 
     225    :alt: Survey Plot 
     226    :width: 48 
     227    :height: 48 
     228     
     229.. |MosaicDisplay_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/MosaicDisplay_48.png 
     230    :align: middle 
     231    :alt: Mosaic Display 
     232    :width: 48 
     233    :height: 48 
     234     
     235.. |SieveDiagram_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/SieveDiagram_48.png 
     236    :align: middle 
     237    :alt: Sieve Diagram 
     238    :width: 48 
     239    :height: 48 
     240     
     241.. |SieveMultigram_icon| image:: ../../../Orange/OrangeWidgets/Visualize/icons/SieveMultigram_48.png 
     242    :align: middle 
     243    :alt: Sieve Multigram 
     244    :width: 48 
     245    :height: 48 
     246 
     247 
     248Classify 
     249-------- 
     250     
     251.. list-table:: 
     252    :class: widget-catalog-table 
     253    :widths: 1 1 1 
     254     
     255    * - |NaiveBayes_icon| `Naive Bayes`_ 
     256      - |SVM_icon| `SVM`_ 
     257      - |LogReg_icon| `Logistic Regression`_ 
     258    * - |Majority_icon| `Majority`_ 
     259      - |ClassificationTree_icon| `Classification Tree`_ 
     260      - |ClassificationTreeGraph_icon| `Classification Tree Graph`_  
     261    * - |ClsTreeViewer_icon| `Classification Tree Viewer`_ 
     262      - |CN2Rules_icon| `CN2 Rules`_ 
     263      - |CN2RulesViewer_icon| `CN2 Rules Viewer`_ 
     264    * - |kNN_icon| `k-Nearest Neighbours`_ 
     265      - |Nomogram_icon| `Nomogram`_ 
     266      - |RandomForest_icon| `Random Forest`_ 
     267    * - |C4.5_icon| `C4.5`_ 
     268      - |ITreeBuilder_icon| `Interactive Tree Builder`_ 
     269      - 
     270 
     271.. _`Naive Bayes`: _static/Classify/NaiveBayes.htm 
     272 
     273.. _`SVM`: _static/Classify/SVM.htm 
     274 
     275.. _`Logistic Regression`: _static/Classify/LogisticRegression.htm 
     276 
     277.. _`Majority`: _static/Classify/Majority.htm 
     278 
     279.. _`Classification Tree`: _static/Classify/ClassificationTree.htm 
     280 
     281.. _`Classification Tree Graph`: _static/Classify/ClassificationTreeGraph.htm 
     282 
     283.. _`Classification Tree Viewer`: _static/Classify/ClassificationTreeViewer.htm 
     284 
     285.. _`CN2 Rules`: _static/Classify/CN2.htm 
     286 
     287.. _`CN2 Rules Viewer`: _static/Classify/CN2.htm 
     288 
     289.. _`k-Nearest Neighbours`: _static/Classify/kNearestNeighbours.htm 
     290 
     291.. _`Nomogram`: _static/Classify/Nomogram.htm 
     292 
     293.. _`Random Forest`: _static/Classify/RandomForest.htm 
     294 
     295.. _`C4.5`: _static/Classify/C4.5.htm 
     296 
     297.. _`Interactive Tree Builder`: _static/Classify/InteractiveTreeBuilder.htm 
     298 
     299 
     300.. |NaiveBayes_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/NaiveBayes_48.png 
     301    :align: middle 
     302    :alt: Naive Bayes 
     303    :width: 48 
     304    :height: 48 
     305 
     306.. |SVM_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/BasicSVM_48.png 
     307    :align: middle 
     308    :alt: Support Vector Machines 
     309    :width: 48 
     310    :height: 48 
     311 
     312.. |LogReg_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/LogisticRegression_48.png 
     313    :align: middle 
     314    :alt: Logistic Regression 
     315    :width: 48 
     316    :height: 48 
     317 
     318.. |Majority_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/Majority_48.png 
     319    :align: middle 
     320    :alt: Majority 
     321    :width: 48 
     322    :height: 48 
     323 
     324.. |ClassificationTree_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/ClassificationTree_48.png 
     325    :align: middle 
     326    :alt: Classification Tree 
     327    :width: 48 
     328    :height: 48 
     329 
     330.. |ClassificationTreeGraph_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/ClassificationTreeGraph_48.png 
     331    :align: middle 
     332    :alt: Classification Tree Graph 
     333    :width: 48 
     334    :height: 48 
     335 
     336.. |ClsTreeViewer_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/ClassificationTreeViewer_48.png 
     337    :align: middle 
     338    :alt: Classification Tree Viewer 
     339    :width: 48 
     340    :height: 48 
     341 
     342.. |CN2Rules_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/CN2_48.png 
     343    :align: middle 
     344    :alt: CN2 Rules 
     345    :width: 48 
     346    :height: 48 
     347 
     348.. |CN2RulesViewer_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/CN2RulesViewer_48.png 
     349    :align: middle 
     350    :alt: CN2 Rules Viewer 
     351    :width: 48 
     352    :height: 48 
     353 
     354.. |kNN_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/kNearestNeighbours_48.png 
     355    :align: middle 
     356    :alt: k-Nearest Neighbours 
     357    :width: 48 
     358    :height: 48 
     359 
     360.. |Nomogram_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/Nomogram_48.png 
     361    :align: middle 
     362    :alt: Nomogram 
     363    :width: 48 
     364    :height: 48 
     365 
     366.. |RandomForest_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/RandomForest_48.png 
     367    :align: middle 
     368    :alt: Random Forest 
     369    :width: 48 
     370    :height: 48 
     371 
     372.. |C4.5_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/C45_48.png 
     373    :align: middle 
     374    :alt: C4.5 
     375    :width: 48 
     376    :height: 48 
     377 
     378.. |ITreeBuilder_icon| image:: ../../../Orange/OrangeWidgets/Classify/icons/ITree_48.png 
     379    :align: middle 
     380    :alt: Interactive Tree Builder 
     381    :width: 48 
     382    :height: 48 
     383 
     384 
     385Regression 
     386---------- 
     387 
     388.. list-table:: 
     389    :widths: 1 1 1 
     390    :class: widget-catalog-table 
     391     
     392    * - |Mean_icon| :ref:`Mean` 
     393      - |LinearRegression_icon| :ref:`Linear Regression` 
     394      - |Earth_icon| :ref:`Earth` 
     395    * - |RegressionTree_icon| `Regression Tree`_ 
     396      - |RegressionTreeGraph_icon| `Regression Tree Graph`_ 
     397      - |Pade_icon| `Pade`_ 
     398 
     399.. _`Regression Tree`: _static/Regression/RegressionTree.htm 
     400 
     401.. _`Regression Tree Graph`: _static/Regression/RegressionTreeGraph.htm 
     402 
     403.. _`Pade`: _static/Regression/Pade.htm 
     404 
     405.. |Mean_icon| image:: ../../../Orange/OrangeWidgets/icons/Unknown.png 
     406    :align: middle 
     407    :alt: Mean 
     408    :width: 48 
     409    :height: 48 
     410 
     411.. |LinearRegression_icon| image:: ../../../Orange/OrangeWidgets/icons/Unknown.png 
     412    :align: middle 
     413    :alt: Linear Regression 
     414    :width: 48 
     415    :height: 48 
     416 
     417.. |Earth_icon| image:: ../../../Orange/OrangeWidgets/icons/Unknown.png 
     418    :align: middle 
     419    :alt: Earth 
     420    :width: 48 
     421    :height: 48 
     422 
     423.. |RegressionTree_icon| image:: ../../../Orange/OrangeWidgets/Regression/icons/RegressionTree.png 
     424    :align: middle 
     425    :alt: Regression Tree 
     426    :width: 48 
     427    :height: 48 
     428 
     429.. |RegressionTreeGraph_icon| image:: ../../../Orange/OrangeWidgets/Regression/icons/RegressionTreeGraph.png 
     430    :align: middle 
     431    :alt: Regression Tree Graph 
     432    :width: 48 
     433    :height: 48 
     434 
     435.. |Pade_icon| image:: ../../../Orange/OrangeWidgets/Regression/icons/Pade.png  
     436    :align: middle 
     437    :alt: Pade 
     438    :width: 48 
     439    :height: 48 
     440 
     441 
     442Evaluate 
     443-------- 
     444 
     445.. list-table:: 
     446    :widths: 1 1 1 
     447    :class: widget-catalog-table 
     448     
     449    * - |ConfusionMatrix_icon| `Confusion Matrix`_   
     450      - |ROCAnalysis_icon| `ROC Analysis`_ 
     451      - |LiftCurve_icon| `Lift Curve`_ 
     452    * - |CalibrationPlot_icon| `Calibration Plot`_ 
     453      - |TestLearners_icon| `Test Learners`_ 
     454      - |Predictions_icon| `Predictions`_ 
     455 
     456.. _`Confusion Matrix`: _static/Evaluate/ConfusionMatrix.htm 
     457 
     458.. _`ROC Analysis`: _static/Evaluate/ROCAnalysis.htm 
     459 
     460.. _`Lift Curve`: _static/Evaluate/LiftCurve.htm 
     461 
     462.. _`Calibration Plot`: _static/Evaluate/CalibrationPlot.htm 
     463 
     464.. _`Test Learners`: _static/Evaluate/TestLearners.htm 
     465 
     466.. _`Predictions`: _static/Evaluate/Predictions.htm 
     467 
     468.. |ConfusionMatrix_icon| image:: ../../../Orange/OrangeWidgets/Evaluate/icons/ConfusionMatrix_48.png  
     469    :align: middle 
     470    :alt: Confusion Matrix 
     471    :width: 48 
     472    :height: 48 
     473 
     474.. |ROCAnalysis_icon| image:: ../../../Orange/OrangeWidgets/Evaluate/icons/ROCAnalysis_48.png  
     475    :align: middle 
      476    :alt: ROC Analysis 
     477    :width: 48 
     478    :height: 48 
     479 
     480.. |LiftCurve_icon| image:: ../../../Orange/OrangeWidgets/Evaluate/icons/LiftCurve_48.png  
     481    :align: middle 
     482    :alt: Lift Curve 
     483    :width: 48 
     484    :height: 48 
     485 
     486.. |CalibrationPlot_icon| image:: ../../../Orange/OrangeWidgets/Evaluate/icons/CalibrationPlot_48.png  
     487    :align: middle 
     488    :alt: Calibration Plot 
     489    :width: 48 
     490    :height: 48 
     491 
     492.. |TestLearners_icon| image:: ../../../Orange/OrangeWidgets/Evaluate/icons/TestLearners_48.png  
     493    :align: middle 
     494    :alt: Test Learners 
     495    :width: 48 
     496    :height: 48 
     497 
     498.. |Predictions_icon| image:: ../../../Orange/OrangeWidgets/Evaluate/icons/Predictions_48.png  
     499    :align: middle 
     500    :alt: Predictions 
     501    :width: 48 
     502    :height: 48 
     503 
     504 
     505Associate 
     506--------- 
     507 
     508.. list-table:: 
     509    :widths: 1 1 1 
     510    :class: widget-catalog-table 
     511     
     512    * - |AssociationRules_icon| `Association Rules`_  
     513      - |AssociationRulesFilter_icon| `Association Rules Filter`_  
     514      - |AssociationRulesExplorer_icon| `Association Rules Explorer`_  
      515    * - |ItemSets_icon| `Item Sets`_ 
      516      - |ItemSetsExplorer_icon| `Item Sets Explorer`_ 
     517      - 
     518 
     519.. _`Association Rules`: _static/Associate/AssociationRules.htm 
     520 
     521.. _`Association Rules Filter`: _static/Associate/AssociationRulesFilter.htm 
     522 
     523.. _`Association Rules Explorer`: _static/Associate/AssociationRulesExplorer.htm 
     524 
     525.. _`Item Sets`: _static/Associate/ItemSets.htm 
     526 
     527.. _`Item Sets Explorer`: _static/Associate/ItemSetsExplorer.htm 
     528 
     529.. |AssociationRules_icon| image:: ../../../Orange/OrangeWidgets/Associate/icons/AssociationRules_48.png 
     530    :align: middle 
     531    :alt: Association Rules 
     532    :width: 48 
     533    :height: 48 
     534 
     535.. |AssociationRulesFilter_icon| image:: ../../../Orange/OrangeWidgets/Associate/icons/AssociationRulesViewer_48.png 
     536    :align: middle 
     537    :alt: AssociationRulesFilter 
     538    :width: 48 
     539    :height: 48 
     540 
     541.. |AssociationRulesExplorer_icon| image:: ../../../Orange/OrangeWidgets/Associate/icons/AssociationRulesTreeViewer_48.png 
     542    :align: middle 
     543    :alt: Association Rules Explorer 
     544    :width: 48 
     545    :height: 48 
     546 
     547.. |ItemSets_icon| image:: ../../../Orange/OrangeWidgets/Associate/icons/Itemsets_48.png 
     548    :align: middle 
     549    :alt: Item Sets 
     550    :width: 48 
     551    :height: 48 
     552 
     553.. |ItemSetsExplorer_icon| image:: ../../../Orange/OrangeWidgets/Associate/icons/ItemsetsExplorer_48.png 
     554    :align: middle 
     555    :alt: Item Sets Explorer 
     556    :width: 48 
     557    :height: 48 
     558 
     559 
     560Unsupervised 
     561------------ 
     562 
     563.. list-table:: 
     564    :widths: 1 1 1 
     565    :class: widget-catalog-table 
     566     
     567    * - |DistanceFile_icon| `Distance File`_ 
      568      - |SaveDistanceFile_icon| `Save Distance File`_ 
      569      - |MatrixTransformation_icon| `Matrix Transformation`_ 
      570    * - |DistanceMatrixFilter_icon| `Distance Matrix Filter`_ 
     571      - |DistanceMap_icon| `Distance Map`_ 
     572      - |ExampleDistance_icon| `Example Distance`_ 
     573    * - |AttributeDistance_icon| `AttributeDistance`_ 
     574      - |HierarchicalClustering_icon| `Hierarchical Clustering`_ 
     575      - |InteractionGraph_icon| `Interaction Graph`_ 
     576    * - |kMeansClustering_icon| `k-Means Clustering`_ 
     577      - |MDS_icon| `MDS`_ 
      578      - |NetworkFile_icon| `Network File`_ 
     579    * - |NetExplorer_icon| `Net Explorer`_ 
      580      - |NetworkFromDistances_icon| `Network From Distances`_ 
      581      - |SOM_icon| `SOM`_ 
      582    * - |SOMVisualizer_icon| `SOM Visualizer`_ 
     583      - 
     584      - 
     585 
     586 
     587.. _`Distance File`: _static/Unsupervised/DistanceFile.htm 
     588 
     589.. _`Save Distance File`: _static/Unsupervised/SaveDistanceFile.htm 
     590 
      591.. _`Matrix Transformation`: _static/Unsupervised/MatrixTransformation.htm 
     592 
     593.. _`Distance Matrix Filter`: _static/Unsupervised/DistanceMatrixFilter.htm 
     594 
     595.. _`Distance Map`: _static/Unsupervised/DistanceMap.htm 
     596 
     597.. _`Example Distance`: _static/Unsupervised/ExampleDistance.htm 
     598 
     599.. _`AttributeDistance`: _static/Unsupervised/AttributeDistance.htm 
     600 
     601.. _`Hierarchical Clustering`: _static/Unsupervised/HierarchicalClustering.htm 
     602 
     603.. _`Interaction Graph`: _static/Unsupervised/InteractionGraph.htm 
     604 
     605.. _`k-Means Clustering`: _static/Unsupervised/k-MeansClustering.htm 
     606 
     607.. _`MDS`: _static/Unsupervised/MDS.htm 
     608 
     609.. _`Network File`: _static/Unsupervised/NetworkFile.htm 
     610 
     611.. _`Net Explorer`: _static/Unsupervised/NetExplorer.htm 
     612 
     613.. _`Network From Distances`: _static/Unsupervised/NetworkFromDistances.htm 
     614 
     615.. _`SOM`: _static/Unsupervised/SOM.htm 
     616 
     617.. _`SOM Visualizer`: _static/Unsupervised/SOMVisualizer.htm 
     618 
     619.. |DistanceFile_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/DistanceFile_48.png 
     620    :align: middle 
     621    :alt: Distance File 
     622    :width: 48 
     623    :height: 48 
     624 
     625.. |SaveDistanceFile_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/SaveDistanceFile_48.png 
     626    :align: middle 
     627    :alt: Save Distance File 
     628    :width: 48 
     629    :height: 48 
     630 
     631.. |MatrixTransformation_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/MatrixTransformation_48.png 
     632    :align: middle 
     633    :alt: Matrix Transformation 
     634    :width: 48 
     635    :height: 48 
     636 
     637.. |DistanceMatrixFilter_icon| image:: ../../../Orange/OrangeWidgets/icons/DistanceFilter.png 
     638    :align: middle 
     639    :alt: Distance Matrix Filter 
     640    :width: 48 
     641    :height: 48 
     642 
     643.. |DistanceMap_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/DistanceMap_48.png 
     644    :align: middle 
     645    :alt: Distance Map 
     646    :width: 48 
     647    :height: 48 
     648 
     649.. |ExampleDistance_icon| image:: ../../../Orange/OrangeWidgets/icons/ExampleDistance.png 
     650    :align: middle 
     651    :alt: Example Distance 
     652    :width: 48 
     653    :height: 48 
     654 
     655.. |AttributeDistance_icon| image:: ../../../Orange/OrangeWidgets/icons/AttributeDistance.png 
     656    :align: middle 
     657    :alt: Attribute Distance 
     658    :width: 48 
     659    :height: 48 
     660 
     661.. |HierarchicalClustering_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/HierarchicalClustering_48.png 
     662    :align: middle 
     663    :alt: Hierarchical Clustering 
     664    :width: 48 
     665    :height: 48 
     666 
     667.. |InteractionGraph_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/InteractionGraph_48.png 
     668    :align: middle 
     669    :alt: Interaction Graph 
     670    :width: 48 
     671    :height: 48 
     672 
     673.. |kMeansClustering_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/KMeans_48.png 
     674    :align: middle 
     675    :alt: k-Means Clustering 
     676    :width: 48 
     677    :height: 48 
     678 
     679.. |MDS_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/MDS_48.png 
     680    :align: middle 
     681    :alt: MDS 
     682    :width: 48 
     683    :height: 48 
     684 
     685.. |NetworkFile_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/NetworkFile_48.png 
     686    :align: middle 
     687    :alt: NetworkFile 
     688    :width: 48 
     689    :height: 48 
     690 
     691.. |NetExplorer_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/Network_48.png 
     692    :align: middle 
     693    :alt: Net Explorer 
     694    :width: 48 
     695    :height: 48 
     696 
     697.. |NetworkFromDistances_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/NetworkFromDistances_48.png 
     698    :align: middle 
     699    :alt: Network From Distances 
     700    :width: 48 
     701    :height: 48 
     702 
     703.. |SOM_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/SOM_48.png 
     704    :align: middle 
     705    :alt: SOM 
     706    :width: 48 
     707    :height: 48 
     708 
     709.. |SOMVisualizer_icon| image:: ../../../Orange/OrangeWidgets/Unsupervised/icons/SOMVisualizer_48.png 
     710    :align: middle 
     711    :alt: SOM Visualizer 
     712    :width: 48 
     713    :height: 48 
     714 
     715 
     716Index: 
    6717 
    7718.. toctree:: 
     
    11722   data/data-table.rst 
    12723   data/select-attributes.rst 
     724    
     725   regression/mean.rst 
     726   regression/linear.rst 
     727   regression/earth.rst 
    13728 
    14729**************** 
  • install-scripts/qt44-3rdparty/27/names.inc

    r8093 r10411  
    1 !define NAME_PYTHON python-2.7.1.msi 
     1!define NAME_PYTHON python-2.7.2.msi 
    22!define NAME_PYTHONWIN pywin32-214.win32-py2.7.exe 
    33!define NAME_MFC mfc71.dll 
     
    1010    !insertmacro installmodule PyQt PyQt-Py2.7-gpl-4.8.1-1.exe $PythonDir\lib\site-packages\PyQt4\*.* 
    1111    !insertmacro installmodule PyQwt PyQt4.Qwt5-5.2.1.win32-py27.exe $PythonDir\lib\site-packages\PyQt4\Qwt5\*.* 
    12     !insertmacro installmodule networkx networkx-1.5.win32.exe $PythonDir\lib\site-packages\networkx\*.* 
     12    !insertmacro installmodule networkx networkx-1.6.win32.exe $PythonDir\lib\site-packages\networkx\*.* 
    1313!macroend 
  • install-scripts/updateAndCall.btm

    r10223 r10413  
    3333REM # 3rd party packages should already be in qt44-3rdparty\%PYVER\ directories 
    3434 
    35 copy /q /s snapshot\install-scripts\qt44-3dparty qt44-3dparty 
     35copy /q /s snapshot\install-scripts\qt44-3rdparty qt44-3rdparty 
    3636 
    3737call callCreateSnapshot.btm 