Ignore:
Files: 7 added, 4 deleted, 50 edited

Legend:

Unmodified
Added
Removed
  • .hgignore

    r10159 r10545  
    2020MANIFEST 
    2121Orange.egg-info 
     22Orange/version.py 
    2223 
    2324# Ignore dot files. 
  • Orange/OrangeCanvas/orngCanvasItems.py

    r9671 r10560  
    99WARNING = 1 
    1010 
    11 #def _graphicsEffect(item): 
    12 #    if hasattr(item, "graphicsEffect"): 
    13 #        return item.graphicsEffect() 
    14 #    else: 
    15 #        return None 
     11def getDropShadow(item): 
     12    if hasattr(item, "graphicsEffect"): 
     13        return item.graphicsEffect() 
     14    else: 
     15        return None 
     16     
     17def setDropShadow(self): 
     18    if qVersion() >= "4.6" and self.canvasDlg.settings["enableCanvasDropShadows"]: 
     19        effect = QGraphicsDropShadowEffect(self.scene()) 
     20        effect.setOffset(QPointF(0.3, 0.5)) 
     21        effect.setBlurRadius(5) 
     22        self.setGraphicsEffect(effect) 
    1623     
    1724class TempCanvasLine(QGraphicsPathItem): 
     
    2633        self.setPen(QPen(QColor(180, 180, 180), 3, Qt.SolidLine)) 
    2734         
    28 #        if qVersion() >= "4.6" and canvasDlg.settings["enableCanvasDropShadows"]: 
    29 #            effect = QGraphicsDropShadowEffect(self.scene()) 
    30 #            effect.setOffset(QPointF(0.0, 0.0)) 
    31 #            effect.setBlurRadius(7) 
    32 #            self.setGraphicsEffect(effect)         
    33          
     35        self.setDropShadow() 
     36     
     37    setDropShadow = setDropShadow 
     38    getDropShadow = getDropShadow 
     39     
    3440    def setStartWidget(self, widget): 
    3541        self.startWidget = widget 
     
    8894        self.hide() 
    8995        self.startWidget = None 
    90         self.endWidget = None  
     96        self.endWidget = None 
     97         
     98        self.prepareGeometryChange() 
     99         
     100        if self.getDropShadow(): 
     101            self.setGraphicsEffect(None) 
     102              
     103        for child in self.childItems(): 
     104            child.hide() 
     105            child.setParentItem(None) 
     106            self.scene().removeItem(child) 
     107             
     108        self.hide() 
    91109        self.scene().removeItem(self) 
    92110 
     
    120138        self.setAcceptHoverEvents(True) 
    121139        self.hoverState = False 
    122          
    123 #        if qVersion() >= "4.6" and canvasDlg.settings["enableCanvasDropShadows"]: 
    124 #            effect = QGraphicsDropShadowEffect(self.scene()) 
    125 #            effect.setOffset(QPointF(0.0, 0.0)) 
    126 #            effect.setBlurRadius(7) 
    127 #            self.setGraphicsEffect(effect) 
    128 #            self.prepareGeometryChange() 
    129140             
    130141        if scene is not None: 
     
    134145             
    135146        QObject.connect(self.outWidget.instance, SIGNAL("dynamicLinkEnabledChanged(PyQt_PyObject, bool)"), self.updateDynamicEnableState) 
    136  
     147         
     148        self.setDropShadow() 
     149         
     150    setDropShadow = setDropShadow 
     151    getDropShadow = getDropShadow 
     152     
    137153    def remove(self): 
    138154        self.hide() 
     
    140156        self.outWidget = None 
    141157        self.inWidget = None 
     158         
     159        self.prepareGeometryChange() 
     160         
     161        if self.getDropShadow(): 
     162            self.setGraphicsEffect(None) 
     163             
     164        for child in self.childItems(): 
     165            child.hide() 
     166            child.setParentItem(None) 
     167            self.scene().removeItem(child) 
     168             
     169        self.hide() 
    142170        self.scene().removeItem(self) 
     171        QApplication.instance().processEvents(QEventLoop.ExcludeUserInputEvents) 
    143172         
    144173    def getEnabled(self): 
     
    193222        return stroke.createStroke(self.path()) 
    194223     
    195 #    def boundingRect(self): 
    196 #        rect = QGraphicsPathItem.boundingRect(self) 
    197 #        if _graphicsEffect(self): 
    198 #            textRect = self.captionItem.boundingRect() ## Should work without this but for some reason if using graphics effects the text gets clipped 
    199 #            textRect.moveTo(self.captionItem.pos()) 
    200 #            return rect.united(textRect) 
    201 #        else: 
    202 #            return rect 
     224    def boundingRect(self): 
     225        rect = QGraphicsPathItem.boundingRect(self) 
     226        if self.getDropShadow(): 
     227            textRect = self.captionItem.boundingRect() ## Should work without this but for some reason if using graphics effects the text gets clipped 
     228            textRect.moveTo(self.captionItem.pos()) 
     229            return rect.united(textRect) 
     230        else: 
     231            return rect 
    203232 
    204233    def paint(self, painter, option, widget = None): 
     
    309338        self.setFlags(QGraphicsItem.ItemIsSelectable)# | QGraphicsItem.ItemIsMovable) 
    310339         
    311 #        if qVersion() >= "4.6" and self.canvasDlg.settings["enableCanvasDropShadows"]: 
    312 #            effect = QGraphicsDropShadowEffect() 
    313 #            effect.setOffset(QPointF(1.1, 3.1)) 
    314 #            effect.setBlurRadius(7) 
    315 #            self.setGraphicsEffect(effect) 
    316 #            self.prepareGeometryChange() 
    317              
    318340        if scene is not None: 
    319341            scene.addItem(self) 
    320  
     342             
     343        self.setDropShadow() 
     344 
     345    setDropShadow = setDropShadow 
     346    getDropShadow = getDropShadow 
     347     
    321348    def resetWidgetSize(self): 
    322349        size = self.canvasDlg.schemeIconSizeList[self.canvasDlg.settings['schemeIconSize']] 
     
    366393            self.instance.setEventHandler(None) 
    367394            self.instance.onDeleteWidget()      # this is a cleanup function that can take care of deleting some unused objects 
    368             try: 
    369                 import sip 
    370                 sip.delete(self.instance) 
    371             except Exception, ex: 
    372                 print >> sys.stderr, "Error deleting the widget: \n%s" % str(ex) 
     395             
     396            # Schedule the widget instance for deletion 
     397            self.instance.deleteLater() 
    373398            self.instance = None 
    374399             
    375             self.scene().removeItem(self) 
    376                  
    377  
     400        self.prepareGeometryChange() 
     401         
     402        if self.getDropShadow(): 
     403            self.setGraphicsEffect(None) 
     404         
     405        for child in self.childItems(): 
     406            child.hide() 
     407            child.setParentItem(None) 
     408            self.scene().removeItem(child) 
     409         
     410        self.hide() 
     411        self.scene().removeItem(self) 
     412         
    378413    def savePosition(self): 
    379414        self.oldPos = self.pos() 
     
    438473        rect = QRectF(QPointF(0, 0), self.widgetSize).adjusted(-11, -6, 11, 6)#.adjusted(-100, -100, 100, 100) #(-10-width, -4, +10+width, +25) 
    439474        rect.setTop(rect.top() - 20 - 21) ## Room for progress bar and warning, error, info icons 
    440 #        if _graphicsEffect(self): 
    441 #            textRect = self.captionItem.boundingRect() ## Should work without this but for some reason if using graphics effects the text gets clipped 
    442 #            textRect.moveTo(self.captionItem.pos())  
    443         return rect 
     475        if self.getDropShadow(): 
     476            textRect = self.captionItem.boundingRect() ## Should work without this but for some reason if using graphics effects the text gets clipped 
     477            textRect.moveTo(self.captionItem.pos())  
     478            return rect.united(textRect) 
     479        else: 
     480            return rect 
    444481 
    445482    # is mouse position inside the left signal channel 
  • Orange/OrangeCanvas/orngDlgs.py

    r10472 r10560  
    461461        generalBox = OWGUI.widgetBox(GeneralTab, "General Options") 
    462462        self.snapToGridCB = OWGUI.checkBox(generalBox, self.settings, "snapToGrid", "Snap widgets to grid", debuggingEnabled = 0) 
    463 #        self.enableCanvasDropShadowsCB = OWGUI.checkBox(generalBox, self.settings, "enableCanvasDropShadows", "Enable drop shadows in canvas", debuggingEnabled = 0) 
     463        self.enableCanvasDropShadowsCB = OWGUI.checkBox(generalBox, self.settings, "enableCanvasDropShadows", "Enable drop shadows in canvas", debuggingEnabled = 0) 
    464464        self.writeLogFileCB  = OWGUI.checkBox(generalBox, self.settings, "writeLogFile", "Save content of the Output window to a log file", debuggingEnabled = 0) 
    465465        self.showSignalNamesCB = OWGUI.checkBox(generalBox, self.settings, "showSignalNames", "Show signal names between widgets", debuggingEnabled = 0) 
  • Orange/OrangeCanvas/orngDoc.py

    r10463 r10560  
    255255        line.outWidget.updateTooltip() 
    256256        line.remove() 
     257         
    257258        if saveTempDoc: 
    258259            self.saveTempDoc() 
     260             
     261        qApp.processEvents(QEventLoop.ExcludeUserInputEvents) 
    259262 
    260263    # remove line, connecting two widgets 
     
    350353        self.widgets.remove(widget) 
    351354        widget.remove() 
     355         
    352356        if saveTempDoc: 
    353357            self.saveTempDoc() 
     358             
     359        qApp.processEvents(QEventLoop.ExcludeUserInputEvents) 
    354360         
    355361        self.update_guide() 
  • Orange/OrangeWidgets/Classify/OWC45Tree.py

    r9671 r10552  
    1111 
    1212from orngWrap import PreprocessedLearner 
     13 
     14import Orange 
     15 
    1316class OWC45Tree(OWWidget): 
    1417    settingsList = ["name", 
     
    2326        self.callbackDeposit = [] 
    2427 
    25         self.inputs = [("Data", ExampleTable, self.setData), ("Preprocess", PreprocessedLearner, self.setPreprocessor)] 
    26         self.outputs = [("Learner", orange.Learner),("Classification Tree", orange.TreeClassifier)]#, ("C45 Tree", orange.C45Classifier)] 
     28        self.inputs = [("Data", ExampleTable, self.setData), 
     29                       ("Preprocess", PreprocessedLearner, self.setPreprocessor)] 
     30         
     31        self.outputs = [("Learner", orange.Learner), 
     32                        ("Classification Tree", Orange.classification.tree.TreeClassifier)]#, ("C45 Tree", orange.C45Classifier)] 
    2733 
    2834        # Settings 
  • Orange/OrangeWidgets/Classify/OWClassificationTree.py

    r9671 r10552  
    3333        OWWidget.__init__(self, parent, signalManager, name, wantMainArea=0, resizingEnabled=0) 
    3434 
    35         self.inputs = [("Data", ExampleTable, self.setData), ("Preprocess", PreprocessedLearner, self.setPreprocessor)] 
    36         self.outputs = [("Learner", orange.TreeLearner), ("Classification Tree", orange.TreeClassifier), ("Classification Tree Graph", Orange.network.Graph)] 
     35        self.inputs = [("Data", ExampleTable, self.setData), 
     36                       ("Preprocess", PreprocessedLearner, self.setPreprocessor)] 
     37         
     38        self.outputs = [("Learner", Orange.classification.tree.TreeLearner), 
     39                        ("Classification Tree", Orange.classification.tree.TreeClassifier), 
     40                        ("Classification Tree Graph", Orange.network.Graph)] 
    3741 
    3842        self.name = 'Classification Tree' 
  • Orange/OrangeWidgets/Classify/OWClassificationTreeGraph.py

    r9671 r10552  
    88from OWTreeViewer2D import * 
    99import OWColorPalette 
     10 
     11import Orange 
    1012 
    1113class PieChart(QGraphicsRectItem): 
     
    201203        OWTreeViewer2D.__init__(self, parent, signalManager, name) 
    202204 
    203         self.inputs = [("Classification Tree", orange.TreeClassifier, self.ctree)] 
     205        self.inputs = [("Classification Tree", Orange.classification.tree.TreeClassifier, self.ctree)] 
    204206        self.outputs = [("Data", ExampleTable)] 
    205207 
  • Orange/OrangeWidgets/Classify/OWClassificationTreeViewer.py

    r10458 r10552  
    1111 
    1212import orngTree 
     13import Orange 
    1314 
    1415class ColumnCallback: 
     
    4647#        self.callbackDeposit = [] 
    4748 
    48         self.inputs = [("Classification Tree", orange.TreeClassifier, self.setClassificationTree)] 
     49        self.inputs = [("Classification Tree", Orange.classification.tree.TreeClassifier, self.setClassificationTree)] 
    4950        self.outputs = [("Data", ExampleTable)] 
    5051 
  • Orange/OrangeWidgets/Data/OWPreprocess.py

    r9671 r10542  
    1717import math 
    1818 
    19 from Orange.preprocess import * 
     19from Orange.data import preprocess 
    2020 
    2121def _gettype(obj): 
     
    7676    def getDiscretizer(self): 
    7777        if self.discInd == 0: 
    78             preprocessor = Preprocessor_discretizeEntropy(method=orange.EntropyDiscretization()) 
     78            preprocessor = preprocess.DiscretizeEntropy(method=orange.EntropyDiscretization()) 
    7979        elif self.discInd in [1, 2]: 
    8080            name, disc, kwds = self.DISCRETIZERS[self.discInd] 
    81             preprocessor = Preprocessor_discretize(method=disc(**dict([(key, getattr(self, key, val)) for key, val in kwds.items()]))) 
     81            preprocessor = preprocess.Discretize(method=disc(**dict([(key, getattr(self, key, val)) for key, 
     82                                                                                              val in kwds.items()]))) 
    8283        elif self.discInd == 3: 
    83             preprocessor = Preprocessor_removeContinuous() 
     84            preprocessor = preprocess.RemoveContinuous() 
    8485        return preprocessor 
    8586     
     
    120121    def getContinuizer(self): 
    121122        if self.contInd in [0, 1, 2, 4, 5]: 
    122             preprocessor = Preprocessor_continuize(multinomialTreatment=self.CONTINUIZERS[self.contInd][1]) 
     123            preprocessor = preprocess.Continuize(multinomialTreatment=self.CONTINUIZERS[self.contInd][1]) 
    123124        elif self.contInd == 3: 
    124             preprocessor = Preprocessor_removeDiscrete() 
     125            preprocessor = preprocess.RemoveDiscrete() 
    125126        return preprocessor 
    126127     
    127128    def setContinuizer(self, continuizer): 
    128         if isinstance(continuizer, Preprocessor_removeDiscrete): 
     129        if isinstance(continuizer, preprocess.RemoveDiscrete): 
    129130            self.contInd = 3 #Ignore all discrete 
    130         elif isinstance(continuizer,Preprocessor_continuize): 
     131        elif isinstance(continuizer,preprocess.Continuize): 
    131132            self.contInd = self.TREATMENT_TO_IND.get(continuizer.multinomialTreatment, 3) 
    132133     
    133     data = _pyqtProperty(Preprocessor_continuize, 
     134    data = _pyqtProperty(preprocess.Continuize, 
    134135                        fget=getContinuizer, 
    135136                        fset=setContinuizer, 
     
    155156        if self.methodInd in [0, 1, 2]: 
    156157            learner = self.IMPUTERS[self.methodInd][1]() 
    157             imputer = Preprocessor_imputeByLearner(learner=learner) 
     158            imputer = preprocess.ImputeByLearner(learner=learner) 
    158159        elif self.methodInd == 3: 
    159             imputer = orange.Preprocessor_dropMissing() 
     160            imputer = preprocess.DropMissing() 
    160161        return imputer 
    161162             
     
    163164    def setImputer(self, imputer): 
    164165        self.methodInd = 0 
    165         if isinstance(imputer, Preprocessor_imputeByLearner): 
     166        if isinstance(imputer, preprocess.ImputeByLearner): 
    166167            learner = imputer.learner 
    167168            dd = dict([(t, i) for i, (_, t) in enumerate(self.IMPUTERS)]) 
    168169            self.methodInd = dd.get(_gettype(learner), 0) 
    169         elif isinstance(imputer, orange.Preprocessor_dropMissing): 
     170        elif isinstance(imputer, preprocess.DropMissing): 
    170171            self.methodInd = 3 
    171172             
    172     data = _pyqtProperty(Preprocessor_imputeByLearner, 
     173    data = _pyqtProperty(preprocess.ImputeByLearner, 
    173174                        fget=getImputer, 
    174175                        fset=setImputer, 
     
    183184                ("Linear SVM weights", orngSVM.MeasureAttribute_SVMWeights)] 
    184185     
    185     FILTERS = [Preprocessor_featureSelection.bestN, 
    186                Preprocessor_featureSelection.bestP] 
     186    FILTERS = [preprocess.FeatureSelection.bestN, 
     187               preprocess.FeatureSelection.bestP] 
    187188     
    188189    def __init__(self, parent=None): 
     
    235236     
    236237    def getFeatureSelection(self): 
    237         return Preprocessor_featureSelection(measure=self.MEASURES[self.measureInd][1], 
     238        return preprocess.FeatureSelection(measure=self.MEASURES[self.measureInd][1], 
    238239                                             filter=self.FILTERS[self.selectBy], 
    239240                                             limit=self.bestP if self.selectBy  else self.bestN) 
    240241     
    241     data = _pyqtProperty(Preprocessor_featureSelection, 
     242    data = _pyqtProperty(preprocess.FeatureSelection, 
    242243                        fget=getFeatureSelection, 
    243244                        fset=setFeatureSelection, 
     
    245246         
    246247class SampleEditor(BaseEditor): 
    247     FILTERS = [Preprocessor_sample.selectNRandom, 
    248                Preprocessor_sample.selectPRandom] 
     248    FILTERS = [preprocess.Sample.selectNRandom, 
     249               preprocess.Sample.selectPRandom] 
    249250    def __init__(self, parent=None): 
    250251        BaseEditor.__init__(self, parent) 
     
    278279         
    279280    def getSampler(self): 
    280         return Preprocessor_sample(filter=self.FILTERS[self.methodInd], 
     281        return preprocess.Sample(filter=self.FILTERS[self.methodInd], 
    281282                                   limit=self.sampleN if self.methodInd == 0 else self.sampleP) 
    282283     
     
    291292        self.updateSpinStates() 
    292293             
    293     data = _pyqtProperty(Preprocessor_sample, 
     294    data = _pyqtProperty(preprocess.Sample, 
    294295                        fget=getSampler, 
    295296                        fset=setSampler, 
     
    302303         
    303304    #Preprocessor name replacement rules 
    304     REPLACE = {Preprocessor_discretize: "Discretize ({0.method})", 
    305                Preprocessor_discretizeEntropy: "Discretize (entropy)", 
    306                Preprocessor_removeContinuous: "Discretize (remove continuous)", 
    307                Preprocessor_continuize: "Continuize ({0.multinomialTreatment})", 
    308                Preprocessor_removeDiscrete: "Continuize (remove discrete)", 
    309                Preprocessor_impute: "Impute ({0.model})", 
    310                Preprocessor_imputeByLearner: "Impute ({0.learner})", 
    311                Preprocessor_dropMissing: "Remove missing", 
    312                Preprocessor_featureSelection: "Feature selection ({0.measure}, {0.filter}, {0.limit})", 
    313                Preprocessor_sample: "Sample ({0.filter}, {0.limit})", 
     305    REPLACE = {preprocess.Discretize: "Discretize ({0.method})", 
     306               preprocess.DiscretizeEntropy: "Discretize (entropy)", 
     307               preprocess.RemoveContinuous: "Discretize (remove continuous)", 
     308               preprocess.Continuize: "Continuize ({0.multinomialTreatment})", 
     309               preprocess.RemoveDiscrete: "Continuize (remove discrete)", 
     310               preprocess.Impute: "Impute ({0.model})", 
     311               preprocess.ImputeByLearner: "Impute ({0.learner})", 
     312               preprocess.DropMissing: "Remove missing", 
     313               preprocess.FeatureSelection: "Feature selection ({0.measure}, {0.filter}, {0.limit})", 
     314               preprocess.Sample: "Sample ({0.filter}, {0.limit})", 
    314315               orange.EntropyDiscretization: "entropy", 
    315316               orange.EquiNDiscretization: "freq, {0.numberOfIntervals}", 
     
    447448     
    448449    # Default preprocessors 
    449     preprocessors =[("Discretize", Preprocessor_discretizeEntropy, {}), 
    450                     ("Continuize", Preprocessor_continuize, {}), 
    451                     ("Impute", Preprocessor_impute, {}), 
    452                     ("Feature selection", Preprocessor_featureSelection, {}), 
    453                     ("Sample", Preprocessor_sample, {})] 
     450    preprocessors =[("Discretize", preprocess.DiscretizeEntropy, {}), 
     451                    ("Continuize", preprocess.Continuize, {}), 
     452                    ("Impute", preprocess.Impute, {}), 
     453                    ("Feature selection", preprocess.FeatureSelection, {}), 
     454                    ("Sample", preprocess.Sample, {})] 
    454455     
    455456    # Editor widgets for preprocessors 
    456     EDITORS = {Preprocessor_discretize: DiscretizeEditor, 
    457                Preprocessor_discretizeEntropy: DiscretizeEditor, 
    458                Preprocessor_removeContinuous: DiscretizeEditor, 
    459                Preprocessor_continuize: ContinuizeEditor, 
    460                Preprocessor_removeDiscrete: ContinuizeEditor, 
    461                Preprocessor_impute: ImputeEditor, 
    462                Preprocessor_imputeByLearner: ImputeEditor, 
    463                Preprocessor_dropMissing: ImputeEditor, 
    464                Preprocessor_featureSelection: FeatureSelectEditor, 
    465                Preprocessor_sample: SampleEditor, 
     457    EDITORS = {preprocess.Discretize: DiscretizeEditor, 
     458               preprocess.DiscretizeEntropy: DiscretizeEditor, 
     459               preprocess.RemoveContinuous: DiscretizeEditor, 
     460               preprocess.Continuize: ContinuizeEditor, 
     461               preprocess.RemoveDiscrete: ContinuizeEditor, 
     462               preprocess.Impute: ImputeEditor, 
     463               preprocess.ImputeByLearner: ImputeEditor, 
     464               preprocess.DropMissing: ImputeEditor, 
     465               preprocess.FeatureSelection: FeatureSelectEditor, 
     466               preprocess.Sample: SampleEditor, 
    466467               type(None): QWidget} 
    467468     
     
    476477         
    477478#        self.allSchemas = [PreprocessorSchema("Default" , [Preprocessor_discretize(method=orange.EntropyDiscretization()), Preprocessor_dropMissing()])] 
    478         self.allSchemas = [("Default" , [Preprocessor_discretizeEntropy(method=orange.EntropyDiscretization()), Preprocessor_dropMissing()], 0)] 
     479        self.allSchemas = [("Default" , [preprocess.DiscretizeEntropy(method=orange.EntropyDiscretization()), 
     480                                         preprocess.DropMissing()], 0)] 
    479481         
    480482        self.lastSelectedSchemaIndex = 0 
  • Orange/OrangeWidgets/Regression/OWRegressionTree.py

    r9671 r10552  
    1515 
    1616from orngWrap import PreprocessedLearner 
     17 
     18import Orange 
    1719 
    1820class OWRegressionTree(OWWidget): 
     
    3840        self.preprocessor = None 
    3941 
    40         self.inputs=[("Data",ExampleTable,self.dataset), ("Preprocess", PreprocessedLearner, self.setPreprocessor)] 
    41         self.outputs=[("Learner",orange.Learner),("Regressor",orange.Classifier),("Regression Tree",orange.TreeClassifier)] 
     42        self.inputs=[("Data",ExampleTable,self.dataset), 
     43                     ("Preprocess", PreprocessedLearner, self.setPreprocessor)] 
     44         
     45        self.outputs=[("Learner", orange.Learner), 
     46                      ("Regressor", orange.Classifier), 
     47                      ("Regression Tree", Orange.regression.tree.TreeClassifier)] 
    4248 
    4349        ## 
  • Orange/OrangeWidgets/Regression/OWRegressionTreeViewer2D.py

    r9671 r10552  
    99import re 
    1010 
    11          
     11import Orange 
     12 
    1213class RegressionTreeNode(GraphicsNode): 
    1314    def __init__(self, attr, tree, parent=None, *args): 
     
    126127        OWTreeViewer2D.__init__(self, parent, signalManager, name) 
    127128 
    128         self.inputs = [("Classification Tree", orange.TreeClassifier, self.ctree)] 
     129        self.inputs = [("Classification Tree", Orange.regression.tree.TreeClassifier, self.ctree)] 
    129130        self.outputs = [("Data", ExampleTable)] 
    130131         
  • Orange/OrangeWidgets/Visualize Qt/OWLinProj3DPlot.py

    r9671 r10542  
    55from plot import OWPoint 
    66 
    7 from Orange.preprocess.scaling import ScaleLinProjData3D, get_variable_values_sorted 
     7from Orange.data.preprocess.scaling import ScaleLinProjData3D, get_variable_values_sorted 
    88import orange 
    99Discrete = orange.VarTypes.Discrete 
  • Orange/OrangeWidgets/Visualize Qt/OWScatterPlot3D.py

    r9671 r10542  
    1616Continuous = orange.VarTypes.Continuous 
    1717 
    18 from Orange.preprocess.scaling import get_variable_values_sorted 
     18from Orange.data.preprocess.scaling import get_variable_values_sorted 
    1919 
    2020import OWGUI 
  • Orange/OrangeWidgets/plot/owtools.py

    r9671 r10542  
    4141from owpalette import OWPalette 
    4242 
    43 from Orange.preprocess.scaling import get_variable_values_sorted 
     43from Orange.data.preprocess.scaling import get_variable_values_sorted 
    4444import orangeom 
    4545import ColorPalette 
  • Orange/__init__.py

    r10491 r10549  
    3131_import("data.sample") 
    3232_import("data.outliers") 
     33_import("data.preprocess") 
     34_import("data.preprocess.scaling") 
    3335_import("data.utils") 
    3436_import("data.discretization") 
     
    8082_import("ensemble.boosting") 
    8183_import("ensemble.forest") 
     84_import("ensemble.stacking") 
    8285 
    8386_import("regression") 
     
    102105 
    103106_import("associate") 
    104  
    105 _import("preprocess") 
    106 _import("preprocess.scaling") 
    107107 
    108108_import("distance") 
  • Orange/classification/logreg.py

    r10387 r10542  
    11import Orange 
    22from Orange.misc import deprecated_keywords, deprecated_members 
     3from Orange.data import preprocess 
    34import math 
     5 
     6 
    47from numpy import dot, array, identity, reshape, diagonal, \ 
    58    transpose, concatenate, sqrt, sign 
     
    746749        examples = self.imputer(examples)(examples) 
    747750    if getattr(self, "removeMissing", 0): 
    748         examples = Orange.core.Preprocessor_dropMissing(examples) 
    749     continuizer = Orange.preprocess.DomainContinuizer(zeroBased=1, 
    750         continuousTreatment=Orange.preprocess.DomainContinuizer.Leave, 
    751                                            multinomialTreatment = Orange.preprocess.DomainContinuizer.FrequentIsBase, 
    752                                            classTreatment = Orange.preprocess.DomainContinuizer.Ignore) 
     751        examples = preprocess.DropMissing(examples) 
     752    continuizer = preprocess.DomainContinuizer(zeroBased=1, 
     753        continuousTreatment=preprocess.DomainContinuizer.Leave, 
     754                                           multinomialTreatment = preprocess.DomainContinuizer.FrequentIsBase, 
     755                                           classTreatment = preprocess.DomainContinuizer.Ignore) 
    753756    attr = [] 
    754757    remain_attr = examples.domain.features[:] 
  • Orange/classification/svm/__init__.py

    r10369 r10542  
    1818                        SVMClassifierSparse 
    1919 
    20 from Orange.preprocess import Preprocessor_impute, \ 
    21                               Preprocessor_continuize, \ 
    22                               Preprocessor_preprocessorList, \ 
    23                               DomainContinuizer 
     20from Orange.data import preprocess 
    2421 
    2522from Orange import feature as variable 
     
    243240 
    244241    def _normalize(self, data): 
    245         dc = Orange.core.DomainContinuizer() 
    246         dc.class_treatment = Orange.core.DomainContinuizer.Ignore 
    247         dc.continuous_treatment = Orange.core.DomainContinuizer.NormalizeBySpan 
    248         dc.multinomial_treatment = Orange.core.DomainContinuizer.NValues 
     242        dc = preprocess.DomainContinuizer() 
     243        dc.class_treatment = preprocess.DomainContinuizer.Ignore 
     244        dc.continuous_treatment = preprocess.DomainContinuizer.NormalizeBySpan 
     245        dc.multinomial_treatment = preprocess.DomainContinuizer.NValues 
    249246        newdomain = dc(data) 
    250247        return data.translate(newdomain) 
     
    308305    def _normalize(self, data): 
    309306        if self.use_non_meta: 
    310             dc = Orange.core.DomainContinuizer() 
    311             dc.class_treatment = Orange.core.DomainContinuizer.Ignore 
    312             dc.continuous_treatment = Orange.core.DomainContinuizer.NormalizeBySpan 
    313             dc.multinomial_treatment = Orange.core.DomainContinuizer.NValues 
     307            dc = preprocess.DomainContinuizer() 
     308            dc.class_treatment = preprocess.DomainContinuizer.Ignore 
     309            dc.continuous_treatment = preprocess.DomainContinuizer.NormalizeBySpan 
     310            dc.multinomial_treatment = preprocess.DomainContinuizer.NValues 
    314311            newdomain = dc(data) 
    315312            data = data.translate(newdomain) 
     
    332329 
    333330    def learn_classifier(self, data): 
    334         transformer = Orange.core.DomainContinuizer() 
    335         transformer.multinomialTreatment = Orange.core.DomainContinuizer.NValues 
     331        transformer = preprocess.DomainContinuizer() 
     332        transformer.multinomialTreatment = preprocess.DomainContinuizer.NValues 
    336333        transformer.continuousTreatment = \ 
    337             Orange.core.DomainContinuizer.NormalizeBySpan 
    338         transformer.classTreatment = Orange.core.DomainContinuizer.Ignore 
     334            preprocess.DomainContinuizer.NormalizeBySpan 
     335        transformer.classTreatment = preprocess.DomainContinuizer.Ignore 
    339336        newdomain = transformer(data) 
    340337        newexamples = data.translate(newdomain) 
     
    371368    # Construct and return a default preprocessor for use by 
    372369    # Orange.core.LinearLearner learner. 
    373     impute = Preprocessor_impute() 
    374     cont = Preprocessor_continuize(multinomialTreatment= 
    375                                    DomainContinuizer.AsOrdinal) 
    376     preproc = Preprocessor_preprocessorList(preprocessors= 
     370    impute = preprocess.Impute() 
     371    cont = preprocess.Continuize(multinomialTreatment= 
     372                                   preprocess.DomainContinuizer.AsOrdinal) 
     373    preproc = preprocess.PreprocessorList(preprocessors= 
    377374                                            [impute, cont]) 
    378375    return preproc 
  • Orange/clustering/mixture.py

    r9976 r10542  
    290290         
    291291    def __call__(self, data, weight_id=None): 
    292         from Orange.preprocess import Preprocessor_impute, DomainContinuizer 
     292        from Orange.data import preprocess 
     293        #import Preprocessor_impute, DomainContinuizer 
    293294#        data = Preprocessor_impute(data) 
    294         dc = DomainContinuizer() 
    295         dc.multinomial_treatment = DomainContinuizer.AsOrdinal 
    296         dc.continuous_treatment = DomainContinuizer.NormalizeByVariance 
    297         dc.class_treatment = DomainContinuizer.Ignore 
     295        dc = preprocess.DomainContinuizer() 
     296        dc.multinomial_treatment = preprocess.DomainContinuizer.AsOrdinal 
     297        dc.continuous_treatment = preprocess.DomainContinuizer.NormalizeByVariance 
     298        dc.class_treatment = preprocess.DomainContinuizer.Ignore 
    298299        domain = dc(data) 
    299300        data = data.translate(domain) 
     
    308309#        array /= std.reshape((1, -1)) 
    309310#        means /= std.reshape((1, -1)) 
    310         solver = EMSolver(array, numpy.ones((self.n)) / self.n, 
     311        solver = EMSolver(array, numpy.ones(self.n) / self.n, 
    311312                          means, correlations) 
    312313        solver.run() 
  • Orange/data/io.py

    r10255 r10559  
    451451        elif index == -1: 
    452452            res.append(str[start:]) 
    453     return res 
     453    return [r.replace(escape + split_str, split_str) for r in res] 
    454454 
    455455def is_standard_var_def(cell): 
     
    480480    elif cell == "": 
    481481        return variable.Descriptor 
    482     elif len(cell.split(",")) > 1: 
    483         return variable.Discrete, cell.split(",") 
     482    elif len(split_escaped_str(cell, " ")) > 1: 
     483        return variable.Discrete, split_escaped_str(cell, " ") 
    484484    else: 
    485485        raise ValueError("Unknown variable type definition %r." % cell) 
     
    531531            specifier = "class" 
    532532            items = items[1:] 
     533        elif items[0] == "multiclass": 
     534            specifier = "multiclass" 
     535            items = items[1:] 
     536        elif items[0] in ["i", "ignore"]: 
     537            specifier = "ignore" 
     538            items = items[1:] 
    533539        return specifier, dict(map(_var_attribute_label_parse, items)) 
    534540    else: 
     
    536542 
    537543def var_attributes(row): 
    538     """ Return variable specifiers and label definitions for row 
     544    """ Return variable specifiers and label definitions for row. 
    539545    """ 
    540546    return map(var_attribute, row) 
     
    568574    if n is None: 
    569575        n = len(values) or 1 
    570     return (float(cont) / n) > cutoff 
     576    return (float(cont) / n) >= cutoff 
    571577 
    572578 
     
    574580    """ Is variable with ``values`` in column (``n`` rows) a discrete variable.  
    575581    """ 
    576     return not is_variable_cont(values, n) 
    577  
    578 def is_variable_string(values, n=None, cutuff=0.1): 
     582    return not is_variable_cont(values, n, cutoff=1.0 - cutoff) 
     583 
     584def is_variable_string(values, n=None, cutuff=0.75): 
    579585    """ Is variable with ``values`` in column (``n`` rows) a string variable.  
    580586    """ 
    581     return False 
    582  
    583 def load_csv(file, create_new_on=MakeStatus.Incompatible, **kwargs): 
     587    if n is None: 
     588        n = len(values) 
     589    return float(len(set(values))) / (n or 1.0) > cutoff 
     590 
     591def load_csv(file, create_new_on=MakeStatus.Incompatible,  
     592             delimiter=None, quotechar=None, escapechar=None, 
     593             skipinitialspace=None, has_header=None,  
     594             has_types=None, has_annotations=None, DK=None, **kwargs): 
    584595    """ Load an Orange.data.Table from a csv file. 
    585596    """ 
     
    589600    sample = file.read(5 * 2 ** 20) # max 5MB sample TODO: What if this is not enough. Try with a bigger sample 
    590601    dialect = snifer.sniff(sample) 
    591     has_header = snifer.has_header(sample) 
     602     
     603    if has_header is None: 
     604        has_header = snifer.has_header(sample) 
     605     
    592606    file.seek(0) # Rewind 
    593     reader = csv.reader(file, dialect=dialect) 
     607     
     608    def kwparams(**kwargs): 
     609        """Return not None kwargs. 
     610        """ 
     611        return dict([(k, v) for k, v in kwargs.items() if v is not None]) 
     612     
     613    fmtparam = kwparams(delimiter=delimiter, 
     614                        quotechar=quotechar, 
     615                        escapechar=escapechar, 
     616                        skipinitialspace=skipinitialspace) 
     617     
     618    reader = csv.reader(file, dialect=dialect, 
     619                        **fmtparam) 
    594620 
    595621    header = types = var_attrs = None 
    596622 
    597 #    if not has_header: 
    598 #        raise ValueError("No header in the data file.") 
    599  
    600     header = reader.next() 
    601  
    602     if header: 
    603         # Try to get variable definitions 
    604         types_row = reader.next() 
    605         if is_var_types_row(types_row): 
    606             types = var_types(types_row) 
    607  
    608     if types: 
    609         # Try to get the variable attributes 
    610         # (third line in the standard orange tab format). 
    611         labels_row = reader.next() 
    612         if is_var_attributes_row(labels_row): 
    613             var_attrs = var_attributes(labels_row) 
    614  
    615     # If definitions not present fill with blanks 
     623    row = first_row = reader.next() 
     624     
     625    if has_header: 
     626        header = row 
     627        # Eat this row and move to the next 
     628        row = reader.next() 
     629 
     630    # Guess types row 
     631    if has_types is None: 
     632        has_types = has_header and is_var_types_row(row) 
     633         
     634    if has_types: 
     635        types = var_types(row) 
     636        # Eat this row and move to the next 
     637        row = reader.next() 
     638 
     639    # Guess variable annotations row 
     640    if has_annotations is None: 
     641        has_annotations = has_header and has_types and \ 
     642                          is_var_attributes_row(row) 
     643         
     644    if has_annotations: 
     645        labels_row = row 
     646        var_attrs = var_attributes(row) 
     647        # Eat this row and move to the next 
     648        row = reader.next() 
     649 
     650    if not header: 
     651        # Create a default header 
     652        header = ["F_%i" % i for i in range(len(first_row))] 
     653         
    616654    if not types: 
     655        # Create blank variable types 
    617656        types = [None] * len(header) 
     657         
    618658    if not var_attrs: 
     659        # Create blank variable attributes 
    619660        var_attrs = [None] * len(header) 
    620661 
    621662    # start from the beginning 
    622663    file.seek(0) 
    623     reader = csv.reader(file, dialect=dialect) 
    624     for defined in [header, types, var_attrs]: 
    625         if any(defined): # skip definition rows if present in the file 
     664    reader = csv.reader(file, dialect=dialect, **fmtparam) 
     665     
     666    for defined in [has_header, has_types, has_annotations]: 
     667        if defined:  
     668            # skip definition rows if present in the file 
    626669            reader.next() 
    627  
     670     
    628671    variables = [] 
    629672    undefined_vars = [] 
     673    # Missing value flags  
     674    missing_flags = DK.split(",") if DK is not None else ["?", "", "NA", "~", "*"] 
     675    missing_map = dict.fromkeys(missing_flags, "?") 
     676    missing_translate = lambda val: missing_map.get(val, val) 
     677     
     678    # Create domain variables or corresponding place holders 
    630679    for i, (name, var_t) in enumerate(zip(header, types)): 
    631         if var_t == variable.Discrete:# We do not have values yet. 
     680        if var_t == variable.Discrete: 
     681            # We do not have values yet 
    632682            variables.append(_disc_placeholder(name)) 
    633683            undefined_vars.append((i, variables[-1])) 
     
    641691            var_t, values = var_t 
    642692            if var_t == variable.Discrete: 
     693                # We have values for discrete variable 
    643694                variables.append(make(name, Orange.feature.Type.Discrete, values, [], create_new_on)) 
    644695            elif var_t == variable.Python: 
     696                # Python variables are not supported yet 
    645697                raise NotImplementedError() 
    646698        elif var_t is None: 
     699            # Unknown variable type, to be deduced at the end 
    647700            variables.append(_var_placeholder(name)) 
    648701            undefined_vars.append((i, variables[-1])) 
    649702 
    650703    data = [] 
     704    # Read all the rows 
    651705    for row in reader: 
    652         data.append(row) 
    653         for ind, var_def in undefined_vars: 
    654             var_def.values.add(row[ind]) 
    655  
     706        # check for final newline. 
     707        if row: 
     708            row = map(missing_translate, row) 
     709            data.append(row) 
     710            # For undefined variables collect all their values 
     711            for ind, var_def in undefined_vars: 
     712                var_def.values.add(row[ind]) 
     713     
     714    # Process undefined variables now that we can deduce their type  
    656715    for ind, var_def in undefined_vars: 
    657716        values = var_def.values - set(["?", ""]) # TODO: Other unknown strings? 
     
    660719            variables[ind] = make(var_def.name, Orange.feature.Type.Discrete, [], values, create_new_on) 
    661720        elif isinstance(var_def, _var_placeholder): 
    662             if is_variable_cont(values): 
     721            if is_variable_cont(values, cutoff=1.0): 
    663722                variables[ind] = make(var_def.name, Orange.feature.Type.Continuous, [], [], create_new_on) 
    664             elif is_variable_discrete(values): 
     723            elif is_variable_discrete(values, cutoff=0.0): 
    665724                variables[ind] = make(var_def.name, Orange.feature.Type.Discrete, [], values, create_new_on) 
    666725            elif is_variable_string(values): 
    667726                variables[ind] = make(var_def.name, Orange.feature.Type.String, [], [], create_new_on) 
    668727            else: 
    669                 raise ValueError("Strange column in the data") 
    670  
    671     vars = [] 
    672     vars_load_status = [] 
     728                # Treat it as a string anyway 
     729                variables[ind] = make(var_def.name, Orange.feature.Type.String, [], [], create_new_on) 
     730 
    673731    attribute_load_status = [] 
    674732    meta_attribute_load_status = {} 
    675733    class_var_load_status = [] 
    676     for var, status in vars: 
    677         vars.append(var) 
    678         vars_load_status.append(status) 
     734    multiclass_var_load_status = [] 
    679735 
    680736    attributes = [] 
    681737    class_var = [] 
     738    class_vars = [] 
    682739    metas = {} 
    683740    attribute_indices = [] 
    684741    variable_indices = [] 
    685742    class_indices = [] 
     743    multiclass_indices = [] 
    686744    meta_indices = [] 
     745    ignore_indices = [] 
    687746    for i, ((var, status), var_attr) in enumerate(zip(variables, var_attrs)): 
    688747        if var_attr: 
     
    692751                class_var_load_status.append(status) 
    693752                class_indices.append(i) 
     753            elif flag == "multiclass": 
     754                class_vars.append(var) 
     755                multiclass_var_load_status.append(status) 
     756                multiclass_indices.append(i) 
    694757            elif flag == "meta": 
    695758                mid = Orange.feature.Descriptor.new_meta_id() 
     
    697760                meta_attribute_load_status[mid] = status 
    698761                meta_indices.append((i, var)) 
     762            elif flag == "ignore": 
     763                ignore_indices.append(i) 
    699764            else: 
    700765                attributes.append(var) 
     
    709774    if len(class_var) > 1: 
    710775        raise ValueError("Multiple class variables defined") 
     776    if class_var and class_vars: 
     777        raise ValueError("Both 'class' and 'multiclass' used.") 
    711778 
    712779    class_var = class_var[0] if class_var else None 
    713  
     780     
    714781    attribute_load_status += class_var_load_status 
    715782    variable_indices = attribute_indices + class_indices 
    716     domain = Orange.data.Domain(attributes, class_var) 
     783    domain = Orange.data.Domain(attributes, class_var, class_vars=class_vars) 
    717784    domain.add_metas(metas) 
    718785    normal = [[row[i] for i in variable_indices] for row in data] 
    719786    meta_part = [[row[i] for i, _ in meta_indices] for row in data] 
     787    multiclass_part = [[row[i] for i in multiclass_indices] for row in data] 
    720788    table = Orange.data.Table(domain, normal) 
    721     for ex, m_part in zip(table, meta_part): 
     789    for ex, m_part, mc_part in zip(table, meta_part, multiclass_part): 
    722790        for (column, var), val in zip(meta_indices, m_part): 
    723791            ex[var] = var(val) 
     792        if mc_part: 
     793            ex.set_classes(mc_part) 
    724794 
    725795    table.setattr("metaAttributeLoadStatus", meta_attribute_load_status) 
     
    751821        for v in all_vars: 
    752822            if isinstance(v, variable.Discrete): 
    753                 type_cells.append(",".join(v.values)) 
     823                escaped_values = [val.replace(" ", r"\ ") for val in v.values] 
     824                type_cells.append(" ".join(escaped_values)) 
    754825            elif isinstance(v, variable.Continuous): 
    755826                type_cells.append("continuous") 
  • Orange/doc/extend-widgets/owplot_example.py

    r9671 r10542  
    55import random 
    66import orange 
    7 from Orange.preprocess.scaling import get_variable_values_sorted 
     7from Orange.data.preprocess.scaling import get_variable_values_sorted 
    88 
    99class BasicPlot(OWPlot): 
  • Orange/ensemble/__init__.py

    r9994 r10540  
    1 """ 
    2  
    3 .. index:: ensemble 
    4  
    5 Module Orange.ensemble implements Breiman's bagging and Random Forest,  
    6 and Freund and Schapire's boosting algorithms. 
    7  
    8  
    9 ******* 
    10 Bagging 
    11 ******* 
    12  
    13 .. index:: bagging 
    14 .. index:: 
    15    single: ensemble; ensemble 
    16  
    17 .. autoclass:: Orange.ensemble.bagging.BaggedLearner 
    18    :members: 
    19    :show-inheritance: 
    20  
    21 .. autoclass:: Orange.ensemble.bagging.BaggedClassifier 
    22    :members: 
    23    :show-inheritance: 
    24  
    25 ******** 
    26 Boosting 
    27 ******** 
    28  
    29 .. index:: boosting 
    30 .. index:: 
    31    single: ensemble; boosting 
    32  
    33  
    34 .. autoclass:: Orange.ensemble.boosting.BoostedLearner 
    35   :members: 
    36   :show-inheritance: 
    37  
    38 .. autoclass:: Orange.ensemble.boosting.BoostedClassifier 
    39    :members: 
    40    :show-inheritance: 
    41  
    42 Example 
    43 ======= 
    44 Let us try boosting and bagging on Lymphography data set and use TreeLearner 
    45 with post-pruning as a base learner. For testing, we use 10-fold cross 
    46 validation and observe classification accuracy. 
    47  
    48 :download:`ensemble.py <code/ensemble.py>` 
    49  
    50 .. literalinclude:: code/ensemble.py 
    51   :lines: 7- 
    52  
    53 Running this script, we may get something like:: 
    54  
    55     Classification Accuracy: 
    56                tree: 0.764 
    57        boosted tree: 0.770 
    58         bagged tree: 0.790 
    59  
    60  
    61 ************* 
    62 Random Forest 
    63 ************* 
    64  
    65 .. index:: random forest 
    66 .. index:: 
    67    single: ensemble; random forest 
    68     
    69 .. autoclass:: Orange.ensemble.forest.RandomForestLearner 
    70   :members: 
    71   :show-inheritance: 
    72  
    73 .. autoclass:: Orange.ensemble.forest.RandomForestClassifier 
    74   :members: 
    75   :show-inheritance: 
    76  
    77  
    78 Example 
    79 ======== 
    80  
    81 The following script assembles a random forest learner and compares it 
    82 to a tree learner on a liver disorder (bupa) and housing data sets. 
    83  
    84 :download:`ensemble-forest.py <code/ensemble-forest.py>` 
    85  
    86 .. literalinclude:: code/ensemble-forest.py 
    87   :lines: 7- 
    88  
    89 Notice that our forest contains 50 trees. Learners are compared through  
    90 3-fold cross validation:: 
    91  
    92     Classification: bupa.tab 
    93     Learner  CA     Brier  AUC 
    94     tree     0.586  0.829  0.575 
    95     forest   0.710  0.392  0.752 
    96     Regression: housing.tab 
    97     Learner  MSE    RSE    R2 
    98     tree     23.708  0.281  0.719 
    99     forest   11.988  0.142  0.858 
    100  
    101 Perhaps the sole purpose of the following example is to show how to 
    102 access the individual classifiers once they are assembled into the 
    103 forest, and to show how we can assemble a tree learner to be used in 
    104 random forests. In the following example the best feature for decision 
    105 nodes is selected among three randomly chosen features, and maxDepth 
    106 and minExamples are both set to 5. 
    107  
    108 :download:`ensemble-forest2.py <code/ensemble-forest2.py>` 
    109  
    110 .. literalinclude:: code/ensemble-forest2.py 
    111   :lines: 7- 
    112  
    113 Running the above code would report on sizes (number of nodes) of the tree 
    114 in a constructed random forest. 
    115  
    116      
    117 Score Feature 
    118 ============= 
    119  
    120 L. Breiman (2001) suggested the possibility of using random forests as a 
    121 non-myopic measure of feature importance. 
    122  
    123 The assessment of feature relevance with random forests is based on the 
    124 idea that randomly changing the value of an important feature greatly 
    125 affects instance's classification, while changing the value of an 
    126 unimportant feature does not affect it much. Implemented algorithm 
    127 accumulates feature scores over given number of trees. Importance of 
    128 all features for a single tree are computed as: correctly classified  
    129 OOB instances minus correctly classified OOB instances when the feature is 
    130 randomly shuffled. The accumulated feature scores are divided by the 
    131 number of used trees and multiplied by 100 before they are returned. 
    132  
    133 .. autoclass:: Orange.ensemble.forest.ScoreFeature 
    134   :members: 
    135  
    136 Computation of feature importance with random forests is rather slow and 
    137 importances for all features need to be computed simultaneously. When it  
    138 is called to compute a quality of certain feature, it computes qualities 
    139 for all features in the dataset. When called again, it uses the stored  
    140 results if the domain is still the same and the data table has not 
    141 changed (this is done by checking the data table's version and is 
    142 not foolproof; it will not detect if you change values of existing instances, 
    143 but will notice adding and removing instances; see the page on  
    144 :class:`Orange.data.Table` for details). 
    145  
    146 :download:`ensemble-forest-measure.py <code/ensemble-forest-measure.py>` 
    147  
    148 .. literalinclude:: code/ensemble-forest-measure.py 
    149   :lines: 7- 
    150  
    151 Corresponding output:: 
    152  
    153     DATA:iris.tab 
    154  
    155     first: 3.91, second: 0.38 
    156  
    157     different random seed 
    158     first: 3.39, second: 0.46 
    159  
    160     All importances: 
    161        sepal length:   3.39 
    162         sepal width:   0.46 
    163        petal length:  30.15 
    164         petal width:  31.98 
    165  
    166 References 
    167 ----------- 
    168 * L Breiman. Bagging Predictors. `Technical report No. 421 \ 
    169     <http://www.stat.berkeley.edu/tech-reports/421.ps.Z>`_. University of \ 
    170     California, Berkeley, 1994. 
    171 * Y Freund, RE Schapire. `Experiments with a New Boosting Algorithm \ 
    172     <http://citeseer.ist.psu.edu/freund96experiments.html>`_. Machine \ 
    173     Learning: Proceedings of the Thirteenth International Conference (ICML'96), 1996. 
    174 * JR Quinlan. `Boosting, bagging, and C4.5 \ 
    175     <http://www.rulequest.com/Personal/q.aaai96.ps>`_ . In Proc. of 13th \ 
    176     National Conference on Artificial Intelligence (AAAI'96). pp. 725-730, 1996.  
    177 * L Breiman. `Random Forests \ 
    178     <http://www.springerlink.com/content/u0p06167n6173512/>`_.\ 
    179     Machine Learning, 45, 5-32, 2001.  
    180 * M Robnik-Sikonja. `Improving Random Forests \ 
    181     <http://lkm.fri.uni-lj.si/rmarko/papers/robnik04-ecml.pdf>`_. In \ 
    182     Proc. of European Conference on Machine Learning (ECML 2004),\ 
    183     pp. 359-370, 2004. 
    184 """ 
    185  
    186 __all__ = ["bagging", "boosting", "forest"] 
     1__all__ = ["bagging", "boosting", "forest", "stacking"] 
    1872__docformat__ = 'restructuredtext' 
    188 import Orange.core as orange 
  • Orange/ensemble/forest.py

    r10525 r10565  
    3636    __new__ = Orange.misc._orange__new__(Orange.core.Learner) 
    3737 
    38     def __init__(self, base, rand): 
     38    def __init__(self, base=None, rand=None): #pickle needs an empty init 
    3939        self.base = base 
    4040        self.attributes = None 
     
    5353class RandomForestLearner(Orange.core.Learner): 
    5454    """ 
    55     Just like in bagging, classifiers in random forests are trained from bootstrap 
    56     samples of training data. Here, the classifiers are trees. However, to increase 
    57     randomness, at each node of the tree the best feature is 
    58     chosen from a subset of features in the data. We closely follow the 
    59     original algorithm (Brieman, 2001) both in implementation and parameter 
     55    Trains an ensemble predictor consisting of trees trained 
     56    on bootstrap 
     57    samples of training data. To increase 
     58    randomness, the tree learner considers only a subset of 
     59    candidate features at each node. The algorithm closely follows 
     60    the original procedure (Breiman, 2001) both in implementation and parameter 
    6061    defaults. 
    6162         
     
    6465 
    6566    :param attributes: number of randomly drawn features among 
    66             which to select the best to split the nodes in tree 
    67             induction. The default, None, means the square root of 
     67            which to select the best one to split the data sets 
     68            in tree nodes. The default, None, means the square root of 
    6869            the number of features in the training data. Ignored if 
    6970            :obj:`learner` is specified. 
     
    8990 
    9091    :param callback: a function to be called after every iteration of 
    91             induction of classifier. This is called with parameter  
    92             (from 0.0 to 1.0) that gives estimates on learning progress. 
    93  
    94     :param name: name of the learner. 
     92            induction of classifier. The call includes a parameter 
     93            (from 0.0 to 1.0) that provides an estimate 
     94            of completion of the learning progress. 
     95 
     96    :param name: learner name. 
    9597    :type name: string 
    9698 
  • Orange/evaluation/scoring.py

    r10429 r10548  
    18161816                                "classIndex2": "class_index2"})( 
    18171817           deprecated_function_name(AUC_for_pair_of_classes))) 
    1818 AUC_matrix = replace_use_weights(deprecated_function_name(AUC_matrix)) 
     1818AUC_matrix = replace_use_weights(AUC_matrix) 
    18191819 
    18201820 
  • Orange/feature/discretization.py

    r9944 r10544  
    3131    """ 
    3232    orange.setrandseed(0) 
    33     data_new = orange.Preprocessor_discretize(data, method=Entropy()) 
     33    data_new = Orange.data.preprocess.Discretize(data, method=Entropy()) 
    3434     
    3535    attrlist = [] 
     
    109109    def __call__(self, data, weight=None): 
    110110        # filter the data and then learn 
    111         from Orange.preprocess import Preprocessor_discretize 
    112         ddata = Preprocessor_discretize(data, method=self.discretizer) 
     111        from Orange.data.preprocess import Discretize 
     112        ddata = Discretize(data, method=self.discretizer) 
    113113        if weight<>None: 
    114114            model = self.baseLearner(ddata, weight) 
  • Orange/feature/scoring.py

    r10524 r10543  
    1 import Orange.core as orange 
    2 import Orange.misc 
    3  
    4 from orange import MeasureAttribute as Score 
    5 from orange import MeasureAttributeFromProbabilities as ScoreFromProbabilities 
    6 from orange import MeasureAttribute_info as InfoGain 
    7 from orange import MeasureAttribute_gainRatio as GainRatio 
    8 from orange import MeasureAttribute_gini as Gini 
    9 from orange import MeasureAttribute_relevance as Relevance  
    10 from orange import MeasureAttribute_cost as Cost 
    11 from orange import MeasureAttribute_relief as Relief 
    12 from orange import MeasureAttribute_MSE as MSE 
     1from Orange import core, feature 
     2from Orange.statistics import contingency, distribution 
     3 
     4from Orange.misc import deprecated_keywords, deprecated_members 
     5 
     6Score = core.MeasureAttribute 
     7ScoreFromProbabilities = core.MeasureAttributeFromProbabilities 
     8InfoGain = core.MeasureAttribute_info 
     9GainRatio = core.MeasureAttribute_gainRatio 
     10Gini = core.MeasureAttribute_gini 
     11Relevance = core.MeasureAttribute_relevance 
     12Cost = core.MeasureAttribute_cost 
     13Relief = core.MeasureAttribute_relief 
     14MSE = core.MeasureAttribute_MSE 
    1315 
    1416###### 
     
    3133 
    3234        :param data: a data table used to score features 
    33         :type data: Orange.data.Table 
     35        :type data: :obj:`~Orange.data.Table` 
    3436 
    3537        :param weight: meta attribute that stores weights of instances 
    36         :type weight: Orange.feature.Descriptor 
     38        :type weight: :obj:`~Orange.feature.Descriptor` 
    3739 
    3840        """ 
     
    4648        return [x[0] for x in measured] 
    4749 
    48 OrderAttributes = Orange.misc.deprecated_members({ 
     50OrderAttributes = deprecated_members({ 
    4951          "measure": "score", 
    5052}, wrap_methods=[])(OrderAttributes) 
     
    5961    """ 
    6062 
    61     @Orange.misc.deprecated_keywords({"aprioriDist": "apriori_dist"}) 
     63    @deprecated_keywords({"aprioriDist": "apriori_dist"}) 
    6264    def __new__(cls, attr=None, data=None, apriori_dist=None, weightID=None): 
    6365        self = Score.__new__(cls) 
    64         if attr != None and data != None: 
     66        if attr is not None and data is not None: 
    6567            #self.__init__(**argkw) 
    6668            return self.__call__(attr, data, apriori_dist, weightID) 
     
    6870            return self 
    6971 
    70     @Orange.misc.deprecated_keywords({"aprioriDist": "apriori_dist"}) 
     72    @deprecated_keywords({"aprioriDist": "apriori_dist"}) 
    7173    def __call__(self, attr, data, apriori_dist=None, weightID=None): 
    7274        """Score the given feature. 
    7375 
    7476        :param attr: feature to score 
    75         :type attr: Orange.feature.Descriptor 
     77        :type attr: :obj:`~Orange.feature.Descriptor` 
    7678 
    7779        :param data: a data table used to score features 
    78         :type data: Orange.data.table 
     80        :type data: :obj:`~Orange.data.Table` 
    7981 
    8082        :param apriori_dist:  
     
    8284         
    8385        :param weightID: meta feature used to weight individual data instances 
    84         :type weightID: Orange.feature.Descriptor 
     86        :type weightID: :obj:`~Orange.feature.Descriptor` 
    8587 
    8688        """ 
    8789        import numpy 
    88         from orngContingency import Entropy 
     90        from orngContingency import Entropy #TODO: Move to new hierarchy 
    8991        if attr in data.domain:  # if we receive attr as string we have to convert to variable 
    9092            attr = data.domain[attr] 
    91         attrClassCont = orange.ContingencyAttrClass(attr, data) 
     93        attrClassCont = contingency.VarClass(attr, data) 
    9294        dist = [] 
    9395        for vals in attrClassCont.values(): 
     
    116118    """ 
    117119 
    118     @Orange.misc.deprecated_keywords({"aprioriDist": "apriori_dist"}) 
     120    @deprecated_keywords({"aprioriDist": "apriori_dist"}) 
    119121    def __new__(cls, attr=None, data=None, apriori_dist=None, weightID=None): 
    120122        self = Score.__new__(cls) 
    121         if attr != None and data != None: 
     123        if attr is not None and data is not None: 
    122124            #self.__init__(**argkw) 
    123125            return self.__call__(attr, data, apriori_dist, weightID) 
     
    125127            return self 
    126128 
    127     @Orange.misc.deprecated_keywords({"aprioriDist": "apriori_dist"}) 
     129    @deprecated_keywords({"aprioriDist": "apriori_dist"}) 
    128130    def __call__(self, attr, data, apriori_dist=None, weightID=None): 
    129131        """Score the given feature. 
    130132 
    131133        :param attr: feature to score 
    132         :type attr: Orange.feature.Descriptor 
     134        :type attr: :obj:`~Orange.feature.Descriptor` 
    133135 
    134136        :param data: a data table used to score the feature 
    135         :type data: Orange.data.table 
     137        :type data: :obj:`~Orange.data.Table` 
    136138 
    137139        :param apriori_dist:  
     
    139141         
    140142        :param weightID: meta feature used to weight individual data instances 
    141         :type weightID: Orange.feature.Descriptor 
     143        :type weightID: :obj:`~Orange.feature.Descriptor` 
    142144 
    143145        """ 
    144         attrClassCont = orange.ContingencyAttrClass(attr, data) 
    145         classDist = orange.Distribution(data.domain.classVar, data).values() 
     146        attrClassCont = contingency.VarClass(attr, data) 
     147        classDist = distribution.Distribution(data.domain.classVar, data).values() 
    146148        nCls = len(classDist) 
    147149        nEx = len(data) 
     
    177179 
    178180 
    179 @Orange.misc.deprecated_keywords({"attrList": "attr_list", "attrMeasure": "attr_score", "removeUnusedValues": "remove_unused_values"}) 
     181@deprecated_keywords({"attrList": "attr_list", "attrMeasure": "attr_score", "removeUnusedValues": "remove_unused_values"}) 
    180182def merge_values(data, attr_list, attr_score, remove_unused_values = 1): 
    181183    import orngCI 
     
    183185    newData = data.select(attr_list + [data.domain.class_var]) 
    184186    newAttr = orngCI.FeatureByCartesianProduct(newData, attr_list)[0] 
    185     dist = orange.Distribution(newAttr, newData) 
     187    dist = distribution.Distribution(newAttr, newData) 
    186188    activeValues = [] 
    187189    for i in range(len(newAttr.values)): 
     
    213215        return newAttr 
    214216 
    215     reducedAttr = orange.EnumVariable(newAttr.name, values = [newAttr.values[i] for i in activeValues]) 
     217    reducedAttr = feature.Discrete.EnumVariable(newAttr.name, values = [newAttr.values[i] for i in activeValues]) 
    216218    reducedAttr.get_value_from = newAttr.get_value_from 
    217219    reducedAttr.get_value_from.class_var = reducedAttr 
     
    220222###### 
    221223# from orngFSS 
    222 @Orange.misc.deprecated_keywords({"measure": "score"}) 
     224@deprecated_keywords({"measure": "score"}) 
    223225def score_all(data, score=Relief(k=20, m=50)): 
    224226    """Assess the quality of features using the given measure and return 
     
    226228 
    227229    :param data: data table should include a discrete class. 
    228     :type data: :obj:`Orange.data.Table` 
     230    :type data: :obj:`~Orange.data.Table` 
    229231    :param score:  feature scoring function. Derived from 
    230232      :obj:`~Orange.feature.scoring.Score`. Defaults to  
    231233      :obj:`~Orange.feature.scoring.Relief` with k=20 and m=50. 
    232     :type measure: :obj:`~Orange.feature.scoring.Score`  
    233     :rtype: :obj:`list`; a sorted (by descending score) list of 
    234       tuples (feature name, score) 
     234    :type score: :obj:`~Orange.feature.scoring.Score` 
     235    :rtype: :obj:`list`; a sorted list of tuples (feature name, score) 
    235236 
    236237    """ 
  • Orange/fixes/fix_changed_names.py

    r10378 r10542  
    472472           "orngLR.zprob":"Orange.classification.logreg.zprob", 
    473473 
    474            "orange.Preprocessor": "Orange.preprocess.Preprocessor", 
    475            "orange.Preprocessor_addCensorWeight": "Orange.preprocess.Preprocessor_addCensorWeight", 
    476            "orange.Preprocessor_addClassNoise": "Orange.preprocess.Preprocessor_addClassNoise", 
    477            "orange.Preprocessor_addClassWeight": "Orange.preprocess.Preprocessor_addClassWeight", 
    478            "orange.Preprocessor_addGaussianClassNoise": "Orange.preprocess.Preprocessor_addGaussianClassNoise", 
    479            "orange.Preprocessor_addGaussianNoise": "Orange.preprocess.Preprocessor_addGaussianNoise", 
    480            "orange.Preprocessor_addMissing": "Orange.preprocess.Preprocessor_addMissing", 
    481            "orange.Preprocessor_addMissingClasses": "Orange.preprocess.Preprocessor_addMissingClasses", 
    482            "orange.Preprocessor_addNoise": "Orange.preprocess.Preprocessor_addNoise", 
    483            "orange.Preprocessor_discretize": "Orange.preprocess.Preprocessor_discretize", 
    484            "orange.Preprocessor_drop": "Orange.preprocess.Preprocessor_drop", 
    485            "orange.Preprocessor_dropMissing": "Orange.preprocess.Preprocessor_dropMissing", 
    486            "orange.Preprocessor_dropMissingClasses": "Orange.preprocess.Preprocessor_dropMissingClasses", 
    487            "orange.Preprocessor_filter": "Orange.preprocess.Preprocessor_filter", 
    488            "orange.Preprocessor_ignore": "Orange.preprocess.Preprocessor_ignore", 
    489            "orange.Preprocessor_imputeByLearner": "Orange.preprocess.Preprocessor_imputeByLearner", 
    490            "orange.Preprocessor_removeDuplicates": "Orange.preprocess.Preprocessor_removeDuplicates", 
    491            "orange.Preprocessor_select": "Orange.preprocess.Preprocessor_select", 
    492            "orange.Preprocessor_shuffle": "Orange.preprocess.Preprocessor_shuffle", 
    493            "orange.Preprocessor_take": "Orange.preprocess.Preprocessor_take", 
    494            "orange.Preprocessor_takeMissing": "Orange.preprocess.Preprocessor_takeMissing", 
    495            "orange.Preprocessor_takeMissingClasses": "Orange.preprocess.Preprocessor_takeMissingClasses", 
     474           "orange.Preprocessor": "Orange.data.preprocess.Preprocessor", 
     475           "orange.Preprocessor_addCensorWeight": "Orange.data.preprocess.AddCensorWeight", 
     476           "orange.Preprocessor_addClassNoise": "Orange.data.preprocess.AddClassNoise", 
     477           "orange.Preprocessor_addClassWeight": "Orange.data.preprocess.AddClassWeight", 
     478           "orange.Preprocessor_addGaussianClassNoise": "Orange.data.preprocess.AddGaussianClassNoise", 
     479           "orange.Preprocessor_addGaussianNoise": "Orange.data.preprocess.AddGaussianNoise", 
     480           "orange.Preprocessor_addMissing": "Orange.data.preprocess.AddMissing", 
     481           "orange.Preprocessor_addMissingClasses": "Orange.data.preprocess.AddMissingClasses", 
     482           "orange.Preprocessor_addNoise": "Orange.data.preprocess.AddNoise", 
     483           "orange.Preprocessor_discretize": "Orange.data.preprocess.Discretize", 
     484           "orange.Preprocessor_drop": "Orange.data.preprocess.Drop", 
     485           "orange.Preprocessor_dropMissing": "Orange.data.preprocess.DropMissing", 
     486           "orange.Preprocessor_dropMissingClasses": "Orange.data.preprocess.DropMissingClasses", 
     487           "orange.Preprocessor_filter": "Orange.data.preprocess.Filter", 
     488           "orange.Preprocessor_ignore": "Orange.data.preprocess.Ignore", 
     489           "orange.Preprocessor_imputeByLearner": "Orange.data.preprocess.ImputeByLearner", 
     490           "orange.Preprocessor_removeDuplicates": "Orange.data.preprocess.RemoveDuplicates", 
     491           "orange.Preprocessor_select": "Orange.data.preprocess.Select", 
     492           "orange.Preprocessor_shuffle": "Orange.data.preprocess.Shuffle", 
     493           "orange.Preprocessor_take": "Orange.data.preprocess.Take", 
     494           "orange.Preprocessor_takeMissing": "Orange.data.preprocess.TakeMissing", 
     495           "orange.Preprocessor_takeMissingClasses": "Orange.data.preprocess.TakeMissingClasses", 
    496496 
    497497           "orange.Discretizer": "Orange.feature.discretization.Discretizer", 
     
    573573           "orngEnviron.addOrangeDirectoriesToPath": "Orange.misc.environ.add_orange_directories_to_path", 
    574574 
    575            "orngScaleData.getVariableValuesSorted": "Orange.preprocess.scaling.get_variable_values_sorted", 
    576            "orngScaleData.getVariableValueIndices": "Orange.preprocess.scaling.get_variable_value_indices", 
    577            "orngScaleData.discretizeDomain": "Orange.preprocess.scaling.discretize_domain", 
    578            "orngScaleData.orngScaleData": "Orange.preprocess.scaling.ScaleData", 
    579            "orngScaleLinProjData.orngScaleLinProjData": "Orange.preprocess.scaling.ScaleLinProjData", 
    580            "orngScalePolyvizData.orngScalePolyvizData": "Orange.preprocess.scaling.ScalePolyvizData", 
    581            "orngScaleScatterPlotData.orngScaleScatterPlotData": "Orange.preprocess.scaling.ScaleScatterPlotData", 
     575           "orngScaleData.getVariableValuesSorted": "Orange.data.preprocess.scaling.get_variable_values_sorted", 
     576           "orngScaleData.getVariableValueIndices": "Orange.data.preprocess.scaling.get_variable_value_indices", 
     577           "orngScaleData.discretizeDomain": "Orange.data.preprocess.scaling.discretize_domain", 
     578           "orngScaleData.orngScaleData": "Orange.data.preprocess.scaling.ScaleData", 
     579           "orngScaleLinProjData.orngScaleLinProjData": "Orange.data.preprocess.scaling.ScaleLinProjData", 
     580           "orngScalePolyvizData.orngScalePolyvizData": "Orange.data.preprocess.scaling.ScalePolyvizData", 
     581           "orngScaleScatterPlotData.orngScaleScatterPlotData": "Orange.data.preprocess.scaling.ScaleScatterPlotData", 
    582582 
    583583           "orngEvalAttr.mergeAttrValues": "Orange.feature.scoring.merge_values", 
  • Orange/misc/testing.py

    r10305 r10542  
    8686 
    8787import orange 
    88 from Orange.preprocess import Preprocessor_discretize, Preprocessor_continuize 
     88from Orange.data import preprocess 
    8989 
    9090TEST_CLASSIFICATION = 1 
     
    108108    dataset = orange.ExampleTable(os.path.join(datasetsdir, name)) 
    109109    if flags & CONTINUIZE_DOMAIN: 
    110         preprocessor = Preprocessor_continuize() 
     110        preprocessor = preprocess.Continuize() 
    111111        dataset = preprocessor(dataset) 
    112112    elif flags & DISCRETIZE_DOMAIN: 
    113         preprocessor = Preprocessor_discretize(method=orange.EquiNDiscretization(), 
     113        preprocessor = preprocess.Discretize(method=orange.EquiNDiscretization(), 
    114114                                               discretize_class=False) 
    115115        dataset = preprocessor(dataset) 
  • Orange/orng/orngScaleData.py

    r9671 r10542  
    1 from Orange.preprocess.scaling import get_variable_values_sorted as getVariableValuesSorted 
    2 from Orange.preprocess.scaling import get_variable_value_indices as getVariableValueIndices 
    3 from Orange.preprocess.scaling import discretize_domain as discretizeDomain 
    4 from Orange.preprocess.scaling import ScaleData as orngScaleData 
     1from Orange.data.preprocess.scaling import get_variable_values_sorted as getVariableValuesSorted 
     2from Orange.data.preprocess.scaling import get_variable_value_indices as getVariableValueIndices 
     3from Orange.data.preprocess.scaling import discretize_domain as discretizeDomain 
     4from Orange.data.preprocess.scaling import ScaleData as orngScaleData 
  • Orange/orng/orngScaleLinProjData.py

    r10475 r10542  
    11from orngScaleData import * 
    2 from Orange.preprocess.scaling import ScaleLinProjData as orngScaleLinProjData 
    3 from Orange.preprocess.scaling import graph_deprecator 
     2from Orange.data.preprocess.scaling import ScaleLinProjData as orngScaleLinProjData 
     3from Orange.data.preprocess.scaling import graph_deprecator 
  • Orange/orng/orngScalePolyvizData.py

    r9671 r10542  
    11from orngScaleLinProjData import * 
    2 from Orange.preprocess.scaling import ScalePolyvizData as orngScalePolyvizData 
     2from Orange.data.preprocess.scaling import ScalePolyvizData as orngScalePolyvizData 
  • Orange/orng/orngScaleScatterPlotData.py

    r9671 r10542  
    11from orngScaleData import * 
    2 from Orange.preprocess.scaling import ScaleScatterPlotData as orngScaleScatterPlotData 
     2from Orange.data.preprocess.scaling import ScaleScatterPlotData as orngScaleScatterPlotData 
  • Orange/orng/orngVizRank.py

    r9671 r10542  
    114114                graph.normalize_examples = 1 
    115115            elif visualizationMethod == SCATTERPLOT3D: 
    116                 from Orange.preprocess.scaling import ScaleScatterPlotData3D 
     116                from Orange.data.preprocess.scaling import ScaleScatterPlotData3D 
    117117                graph = ScaleScatterPlotData3D() 
    118118            elif visualizationMethod == SPHEREVIZ3D: 
    119                 from Orange.preprocess.scaling import ScaleLinProjData3D 
     119                from Orange.data.preprocess.scaling import ScaleLinProjData3D 
    120120                graph = ScaleLinProjData3D() 
    121121                graph.normalize_examples = 1 
    122122            elif visualizationMethod == LINEAR_PROJECTION3D: 
    123                 from Orange.preprocess.scaling import ScaleLinProjData3D 
     123                from Orange.data.preprocess.scaling import ScaleLinProjData3D 
    124124                graph = ScaleLinProjData3D() 
    125125                graph.normalize_examples = 0 
  • Orange/projection/linear.py

    r10490 r10566  
    88import numpy 
    99 
    10 from Orange.preprocess.scaling import ScaleLinProjData 
     10from Orange.data.preprocess.scaling import ScaleLinProjData 
    1111from Orange.orng import orngVisFuncts as visfuncts 
    1212from Orange.misc import deprecated_keywords 
     
    13651365            C = numpy.ma.dot(Xg, Xd) 
    13661366            U, D, T = numpy.linalg.svd(C) 
    1367  
    1368         U = U.T  # eigenvectors are now in rows 
     1367            U = U.T  # eigenvectors are now in rows 
    13691368        return U, D 
    13701369 
     
    15371536    ################ Plotting functions ################### 
    15381537 
    1539     def scree_plot(self, filename = None, title = 'Scree plot'): 
     1538    def scree_plot(self, filename = None, title = 'Scree Plot'): 
    15401539        """ 
    15411540        Draw a scree plot of principal components 
     
    15571556 
    15581557        x_axis = range(len(self.eigen_values)) 
    1559         x_labels = ["PC%d" % (i + 1, ) for i in x_axis] 
    1560  
    1561         ax.set_xticks(x_axis) 
    1562         ax.set_xticklabels(x_labels) 
    1563         plt.setp(ax.get_xticklabels(), "rotation", 90) 
     1558#        x_labels = ["PC%d" % (i + 1, ) for i in x_axis] 
     1559 
     1560#        ax.set_xticks(x_axis) 
     1561#        ax.set_xticklabels(x_labels) 
     1562#        plt.setp(ax.get_xticklabels(), "rotation", 90) 
    15641563        plt.grid(True) 
    15651564 
    1566         ax.set_xlabel('Principal components') 
     1565        ax.set_xlabel('Principal Component Number') 
    15671566        ax.set_ylabel('Proportion of Variance') 
    15681567        ax.set_title(title + "\n") 
  • Orange/regression/earth.py

    r10420 r10542  
    5555from Orange.feature import Discrete, Continuous 
    5656from Orange.data import Table, Domain 
    57 from Orange.preprocess import Preprocessor_continuize, \ 
    58                               Preprocessor_impute, \ 
    59                               Preprocessor_preprocessorList, \ 
     57from Orange.data.preprocess import Continuize as Preprocessor_continuize, \ 
     58                              Impute as Preprocessor_impute, \ 
     59                              PreprocessorList as Preprocessor_preprocessorList, \ 
    6060                              DomainContinuizer 
    6161 
  • Orange/regression/lasso.py

    r10314 r10535  
    1 """\ 
    2 ############################ 
    3 Lasso regression (``lasso``) 
    4 ############################ 
    5  
    6 .. index:: regression 
    7  
    8 .. _`Lasso regression. Regression shrinkage and selection via the lasso`: 
    9     http://www-stat.stanford.edu/~tibs/lasso/lasso.pdf 
    10  
    11  
    12 `The Lasso <http://www-stat.stanford.edu/~tibs/lasso/lasso.pdf>`_ is a shrinkage 
    13 and selection method for linear regression. It minimizes the usual sum of squared 
    14 errors, with a bound on the sum of the absolute values of the coefficients.  
    15  
    16 To fit the regression parameters on housing data set use the following code: 
    17  
    18 .. literalinclude:: code/lasso-example.py 
    19    :lines: 7,9,10,11 
    20  
    21 .. autoclass:: LassoRegressionLearner 
    22     :members: 
    23  
    24 .. autoclass:: LassoRegression 
    25     :members: 
    26  
    27  
    28 .. autoclass:: LassoRegressionLearner 
    29     :members: 
    30  
    31 .. autoclass:: LassoRegression 
    32     :members: 
    33  
    34 Utility functions 
    35 ----------------- 
    36  
    37 .. autofunction:: center 
    38  
    39 .. autofunction:: get_bootstrap_sample 
    40  
    41 .. autofunction:: permute_responses 
    42  
    43  
    44 ======== 
    45 Examples 
    46 ======== 
    47  
    48 To predict values of the response for the first five instances 
    49 use the code 
    50  
    51 .. literalinclude:: code/lasso-example.py 
    52    :lines: 14,15 
    53  
    54 Output 
    55  
    56 :: 
    57  
    58     Actual: 24.00, predicted: 24.58  
    59     Actual: 21.60, predicted: 23.30  
    60     Actual: 34.70, predicted: 24.98  
    61     Actual: 33.40, predicted: 24.78  
    62     Actual: 36.20, predicted: 24.66  
    63  
    64 To see the fitted regression coefficients, print the model 
    65  
    66 .. literalinclude:: code/lasso-example.py 
    67    :lines: 17 
    68  
    69 The output 
    70  
    71 :: 
    72  
    73     Variable  Coeff Est  Std Error          p 
    74      Intercept     22.533 
    75           CRIM     -0.000      0.023      0.480       
    76          INDUS     -0.010      0.023      0.300       
    77             RM      1.303      0.994      0.000   *** 
    78            AGE     -0.002      0.000      0.320       
    79        PTRATIO     -0.191      0.209      0.050     . 
    80          LSTAT     -0.126      0.105      0.000   *** 
    81     Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1 
    82  
    83  
    84     For 7 variables the regression coefficient equals 0:  
    85     ZN 
    86     CHAS 
    87     NOX 
    88     DIS 
    89     RAD 
    90     TAX 
    91     B 
    92  
    93 shows that some of the regression coefficients are equal to 0.     
    94  
    95  
    96  
    97  
    98  
    99 """ 
    100  
    1011import Orange 
    1022import numpy 
  • Orange/regression/linear.py

    r10436 r10557  
    239239            else: 
    240240                X = numpy.insert(A, 0, 1, axis=1) # adds a column of ones 
    241             m += 1 
    242241        else: 
    243242            X = A 
     
    260259            cov = pinv(dot(dot(X.T, W), X)) 
    261260        else: 
    262             cov = pinv(dot(dot(X.T, W), X) + self.ridge_lambda * numpy.eye(m)) 
     261            cov = pinv(dot(dot(X.T, W), X) +  
     262                       self.ridge_lambda * numpy.eye(m + self.intercept)) 
    263263            # TODO: find inferential properties of the estimators 
    264264            compute_stats = False  
  • Orange/testing/regression/results_reference/linear-example.py.txt

    r10431 r10557  
    55Actual: 36.20, predicted: 27.94  
    66  Variable  Coeff Est  Std Error    t-value          p 
    7  Intercept     36.459      5.109      7.137      0.000   *** 
    8       CRIM     -0.108      0.033     -3.283      0.001    ** 
    9         ZN      0.046      0.014      3.378      0.001   *** 
    10      INDUS      0.021      0.062      0.334      0.739       
    11       CHAS      2.687      0.862      3.115      0.002    ** 
    12        NOX    -17.767      3.824     -4.647      0.000   *** 
    13         RM      3.810      0.418      9.107      0.000   *** 
     7 Intercept     36.459      5.103      7.144      0.000   *** 
     8      CRIM     -0.108      0.033     -3.287      0.001    ** 
     9        ZN      0.046      0.014      3.382      0.001   *** 
     10     INDUS      0.021      0.061      0.334      0.738       
     11      CHAS      2.687      0.862      3.118      0.002    ** 
     12       NOX    -17.767      3.820     -4.651      0.000   *** 
     13        RM      3.810      0.418      9.116      0.000   *** 
    1414       AGE      0.001      0.013      0.052      0.958       
    15        DIS     -1.476      0.200     -7.390      0.000   *** 
    16        RAD      0.306      0.066      4.608      0.000   *** 
    17        TAX     -0.012      0.004     -3.277      0.001    ** 
    18    PTRATIO     -0.953      0.131     -7.275      0.000   *** 
    19          B      0.009      0.003      3.463      0.001   *** 
    20      LSTAT     -0.525      0.051    -10.337      0.000   *** 
     15       DIS     -1.476      0.199     -7.398      0.000   *** 
     16       RAD      0.306      0.066      4.613      0.000   *** 
     17       TAX     -0.012      0.004     -3.280      0.001    ** 
     18   PTRATIO     -0.953      0.131     -7.283      0.000   *** 
     19         B      0.009      0.003      3.467      0.001   *** 
     20     LSTAT     -0.525      0.051    -10.347      0.000   *** 
    2121Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1 
    2222  Variable  Coeff Est  Std Error    t-value          p 
    23  Intercept     36.341      5.073      7.164      0.000   *** 
    24      LSTAT     -0.523      0.047    -11.008      0.000   *** 
    25         RM      3.802      0.407      9.347      0.000   *** 
    26    PTRATIO     -0.947      0.129     -7.326      0.000   *** 
    27        DIS     -1.493      0.186     -8.029      0.000   *** 
    28        NOX    -17.376      3.539     -4.910      0.000   *** 
    29       CHAS      2.719      0.855      3.179      0.002    ** 
    30          B      0.009      0.003      3.471      0.001   *** 
    31         ZN      0.046      0.014      3.387      0.001   *** 
    32       CRIM     -0.108      0.033     -3.304      0.001    ** 
    33        RAD      0.300      0.063      4.721      0.000   *** 
    34        TAX     -0.012      0.003     -3.489      0.001   *** 
     23 Intercept     36.341      5.067      7.171      0.000   *** 
     24     LSTAT     -0.523      0.047    -11.019      0.000   *** 
     25        RM      3.802      0.406      9.356      0.000   *** 
     26   PTRATIO     -0.947      0.129     -7.334      0.000   *** 
     27       DIS     -1.493      0.186     -8.037      0.000   *** 
     28       NOX    -17.376      3.535     -4.915      0.000   *** 
     29      CHAS      2.719      0.854      3.183      0.002    ** 
     30         B      0.009      0.003      3.475      0.001   *** 
     31        ZN      0.046      0.014      3.390      0.001   *** 
     32      CRIM     -0.108      0.033     -3.307      0.001    ** 
     33       RAD      0.300      0.063      4.726      0.000   *** 
     34       TAX     -0.012      0.003     -3.493      0.001   *** 
    3535Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1 
  • Orange/testing/unit/tests/test_preprocessors.py

    r10278 r10542  
    44    import unittest 
    55 
    6 from Orange.preprocess import (Preprocessor_addCensorWeight, 
    7          Preprocessor_addClassNoise, 
    8          Preprocessor_addClassWeight, 
    9          Preprocessor_addGaussianClassNoise, 
    10          Preprocessor_addGaussianNoise, 
    11          Preprocessor_addMissing, 
    12          Preprocessor_addMissingClasses, 
    13          Preprocessor_addNoise, 
    14          Preprocessor_discretize, 
    15          Preprocessor_drop, 
    16          Preprocessor_dropMissing, 
    17          Preprocessor_dropMissingClasses, 
    18          Preprocessor_filter, 
    19          Preprocessor_ignore, 
    20          Preprocessor_imputeByLearner, 
    21          Preprocessor_removeDuplicates, 
    22          Preprocessor_select, 
    23          Preprocessor_shuffle, 
    24          Preprocessor_take, 
    25          Preprocessor_takeMissing, 
    26          Preprocessor_takeMissingClasses, 
    27          Preprocessor_discretizeEntropy, 
    28          Preprocessor_removeContinuous, 
    29          Preprocessor_removeDiscrete, 
    30          Preprocessor_continuize, 
    31          Preprocessor_impute, 
    32          Preprocessor_featureSelection, 
    33          Preprocessor_RFE, 
    34          Preprocessor_sample, 
    35          Preprocessor_preprocessorList, 
     6from Orange.data.preprocess import (AddCensorWeight as Preprocessor_addCensorWeight, 
     7         AddClassNoise as  Preprocessor_addClassNoise, 
     8         AddClassWeight as Preprocessor_addClassWeight, 
     9         AddGaussianClassNoise as  Preprocessor_addGaussianClassNoise, 
     10         AddGaussianNoise as Preprocessor_addGaussianNoise, 
     11         AddMissing as Preprocessor_addMissing, 
     12         AddMissingClasses as Preprocessor_addMissingClasses, 
     13         AddNoise as Preprocessor_addNoise, 
     14         Discretize as Preprocessor_discretize, 
     15         Drop as Preprocessor_drop, 
     16         DropMissing as Preprocessor_dropMissing, 
     17         DropMissingClasses as Preprocessor_dropMissingClasses, 
     18         Filter as Preprocessor_filter, 
     19         Ignore as Preprocessor_ignore, 
     20         ImputeByLearner as Preprocessor_imputeByLearner, 
     21         RemoveDuplicates as Preprocessor_removeDuplicates, 
     22         Select as Preprocessor_select, 
     23         Shuffle as Preprocessor_shuffle, 
     24         Take as Preprocessor_take, 
     25         TakeMissing as Preprocessor_takeMissing, 
     26         TakeMissingClasses as Preprocessor_takeMissingClasses, 
     27         DiscretizeEntropy as Preprocessor_discretizeEntropy, 
     28         RemoveContinuous as Preprocessor_removeContinuous, 
     29         RemoveDiscrete as Preprocessor_removeDiscrete, 
     30         Continuize as Preprocessor_continuize, 
     31         Impute as Preprocessor_impute, 
     32         FeatureSelection as Preprocessor_featureSelection, 
     33         RFE as Preprocessor_RFE, 
     34         Sample as Preprocessor_sample, 
     35         PreprocessorList as Preprocessor_preprocessorList, 
    3636         ) 
    3737 
  • Orange/testing/unit/tests/test_table.py

    r10461 r10551  
    8181            f.flush() 
    8282            f.seek(0) 
    83             Orange.data.io.load_csv(f) 
     83            Orange.data.io.load_csv(f, has_header=True, 
     84                                    has_types=True, has_annotations=True) 
    8485 
    8586 
  • docs/reference/rst/Orange.data.rst

    r10125 r10554  
    1515    Orange.data.continuization 
    1616    Orange.data.imputation 
     17    Orange.data.preprocess 
    1718    Orange.data.utils 
    1819    Orange.data.sql 
  • docs/reference/rst/Orange.ensemble.rst

    r9372 r10540  
    33################################## 
    44 
     5.. index:: ensemble 
     6 
     7`Ensembles <http://en.wikipedia.org/wiki/Ensemble_learning>`_ use 
     8multiple models to improve prediction performance. The module 
     9implements a number of popular approaches, including bagging, 
     10boosting, stacking and forest trees. Most of these are available both 
     11for classification and regression with the exception of stacking, which 
     12in the present implementation supports classification only. 
     13 
     14******* 
     15Bagging 
     16******* 
     17 
     18.. index:: bagging 
     19.. index:: 
     20   single: ensemble; ensemble 
     21 
     22.. autoclass:: Orange.ensemble.bagging.BaggedLearner 
     23   :members: 
     24   :show-inheritance: 
     25 
     26.. autoclass:: Orange.ensemble.bagging.BaggedClassifier 
     27   :members: 
     28   :show-inheritance: 
     29 
     30******** 
     31Boosting 
     32******** 
     33 
     34.. index:: boosting 
     35.. index:: 
     36   single: ensemble; boosting 
     37 
     38 
     39.. autoclass:: Orange.ensemble.boosting.BoostedLearner 
     40  :members: 
     41  :show-inheritance: 
     42 
     43.. autoclass:: Orange.ensemble.boosting.BoostedClassifier 
     44   :members: 
     45   :show-inheritance: 
     46 
     47Example 
     48======= 
     49 
     50The following script fits classification models by boosting and 
     51bagging on Lymphography data set with TreeLearner and post-pruning as 
     52a base learner. Classification accuracy of the methods is estimated by 
     5310-fold cross validation (:download:`ensemble.py <code/ensemble.py>`): 
     54 
     55.. literalinclude:: code/ensemble.py 
     56  :lines: 7- 
     57 
     58Running this script demonstrates some benefit of boosting and bagging 
     59over the baseline learner:: 
     60 
     61    Classification Accuracy: 
     62               tree: 0.764 
     63       boosted tree: 0.770 
     64        bagged tree: 0.790 
     65 
     66******** 
     67Stacking 
     68******** 
     69 
     70.. index:: stacking 
     71.. index:: 
     72   single: ensemble; stacking 
     73 
     74 
     75.. autoclass:: Orange.ensemble.stacking.StackedClassificationLearner 
     76  :members: 
     77  :show-inheritance: 
     78 
     79.. autoclass:: Orange.ensemble.stacking.StackedClassifier 
     80   :members: 
     81   :show-inheritance: 
     82 
     83Example 
     84======= 
     85 
     86Stacking often produces classifiers that are more predictive than 
     87individual classifiers in the ensemble. This effect is illustrated by 
     88a script that combines four different classification 
     89algorithms (:download:`ensemble-stacking.py <code/ensemble-stacking.py>`): 
     90 
     91.. literalinclude:: code/ensemble-stacking.py 
     92  :lines: 3- 
     93 
     94The benefits of stacking on this particular data set are 
     95substantial (numbers show classification accuracy):: 
     96 
     97   stacking: 0.934 
     98      bayes: 0.858 
     99       tree: 0.688 
     100         lr: 0.764 
     101        knn: 0.830 
     102 
     103************* 
     104Random Forest 
     105************* 
     106 
     107.. index:: random forest 
     108.. index:: 
     109   single: ensemble; random forest 
     110    
     111.. autoclass:: Orange.ensemble.forest.RandomForestLearner 
     112  :members: 
     113  :show-inheritance: 
     114 
     115.. autoclass:: Orange.ensemble.forest.RandomForestClassifier 
     116  :members: 
     117  :show-inheritance: 
     118 
     119 
     120Example 
     121======== 
     122 
     123The following script assembles a random forest learner and compares it 
     124to a tree learner on a liver disorder (bupa) and housing data sets. 
     125 
     126:download:`ensemble-forest.py <code/ensemble-forest.py>` 
     127 
     128.. literalinclude:: code/ensemble-forest.py 
     129  :lines: 7- 
     130 
     131Notice that our forest contains 50 trees. Learners are compared through  
     1323-fold cross validation:: 
     133 
     134    Classification: bupa.tab 
     135    Learner  CA     Brier  AUC 
     136    tree     0.586  0.829  0.575 
     137    forest   0.710  0.392  0.752 
     138    Regression: housing.tab 
     139    Learner  MSE    RSE    R2 
     140    tree     23.708  0.281  0.719 
     141    forest   11.988  0.142  0.858 
     142 
     143Perhaps the sole purpose of the following example is to show how to 
     144access the individual classifiers once they are assembled into the 
     145forest, and to show how we can assemble a tree learner to be used in 
     146random forests. In the following example the best feature for decision 
     147nodes is selected among three randomly chosen features, and maxDepth 
     148and minExamples are both set to 5. 
     149 
     150:download:`ensemble-forest2.py <code/ensemble-forest2.py>` 
     151 
     152.. literalinclude:: code/ensemble-forest2.py 
     153  :lines: 7- 
     154 
     155Running the above code would report on sizes (number of nodes) of the tree 
     156in a constructed random forest. 
     157 
     158     
     159Feature scoring 
     160=============== 
     161 
     162L. Breiman (2001) suggested the possibility of using random forests as a 
     163non-myopic measure of feature importance. 
     164 
     165The assessment of feature relevance with random forests is based on the 
     166idea that randomly changing the value of an important feature greatly 
     167affects instance's classification, while changing the value of an 
     168unimportant feature does not affect it much. Implemented algorithm 
     169accumulates feature scores over given number of trees. Importance of 
     170all features for a single tree are computed as: correctly classified  
     171OOB instances minus correctly classified OOB instances when the feature is 
     172randomly shuffled. The accumulated feature scores are divided by the 
     173number of used trees and multiplied by 100 before they are returned. 
     174 
     175.. autoclass:: Orange.ensemble.forest.ScoreFeature 
     176  :members: 
     177 
     178Computation of feature importance with random forests is rather slow 
     179and importances for all features need to be computes 
     180simultaneously. When it is called to compute a quality of certain 
     181feature, it computes qualities for all features in the dataset. When 
     182called again, it uses the stored results if the domain is still the 
     183same and the data table has not changed (this is done by checking the 
     184data table's version and is not foolproof; it will not detect if you 
     185change values of existing instances, but will notice adding and 
     186removing instances; see the page on :class:`Orange.data.Table` for 
     187details). 
     188 
     189:download:`ensemble-forest-measure.py <code/ensemble-forest-measure.py>` 
     190 
     191.. literalinclude:: code/ensemble-forest-measure.py 
     192  :lines: 7- 
     193 
     194The output of the above script is:: 
     195 
     196    DATA:iris.tab 
     197 
     198    first: 3.91, second: 0.38 
     199 
     200    different random seed 
     201    first: 3.39, second: 0.46 
     202 
     203    All importances: 
     204       sepal length:   3.39 
     205        sepal width:   0.46 
     206       petal length:  30.15 
     207        petal width:  31.98 
     208 
     209References 
     210---------- 
     211 
     212* L Breiman. Bagging Predictors. `Technical report No. 421 
     213  <http://www.stat.berkeley.edu/tech-reports/421.ps.Z>`_. University 
     214  of California, Berkeley, 1994. 
     215* Y Freund, RE Schapire. `Experiments with a New Boosting Algorithm 
     216  <http://citeseer.ist.psu.edu/freund96experiments.html>`_. Machine 
     217  Learning: Proceedings of the Thirteenth International Conference 
     218  (ICML'96), 1996.  
     219* JR Quinlan. `Boosting, bagging, and C4.5 
     220  <http://www.rulequest.com/Personal/q.aaai96.ps>`_ . In Proc. of 13th 
     221  National Conference on Artificial Intelligence 
     222  (AAAI'96). pp. 725-730, 1996. 
     223* L Breiman. `Random Forests 
     224  <http://www.springerlink.com/content/u0p06167n6173512/>`_. Machine 
     225  Learning, 45, 5-32, 2001. 
     226* M Robnik-Sikonja. `Improving Random Forests 
     227  <http://lkm.fri.uni-lj.si/rmarko/papers/robnik04-ecml.pdf>`_. In 
     228  Proc. of European Conference on Machine Learning (ECML 2004), 
     229  pp. 359-370, 2004. 
     230 
    5231.. automodule:: Orange.ensemble 
    6232 
  • docs/reference/rst/Orange.regression.lasso.rst

    r9372 r10536  
     1############################ 
     2Lasso regression (``lasso``) 
     3############################ 
     4 
    15.. automodule:: Orange.regression.lasso 
     6 
     7.. index:: regression 
     8 
     9.. _`Lasso regression. Regression shrinkage and selection via the lasso`: 
     10    http://www-stat.stanford.edu/~tibs/lasso/lasso.pdf 
     11 
     12 
     13`The Lasso <http://www-stat.stanford.edu/~tibs/lasso/lasso.pdf>`_ is a shrinkage 
     14and selection method for linear regression. It minimizes the usual sum of squared 
     15errors, with a bound on the sum of the absolute values of the coefficients.  
     16 
     17To fit the regression parameters on housing data set use the following code: 
     18 
     19.. literalinclude:: code/lasso-example.py 
     20   :lines: 9,10,11 
     21 
     22.. autoclass:: LassoRegressionLearner 
     23    :members: 
     24 
     25.. autoclass:: LassoRegression 
     26    :members: 
     27 
     28 
     29.. autoclass:: LassoRegressionLearner 
     30    :members: 
     31 
     32.. autoclass:: LassoRegression 
     33    :members: 
     34 
     35Utility functions 
     36----------------- 
     37 
     38.. autofunction:: center 
     39 
     40.. autofunction:: get_bootstrap_sample 
     41 
     42.. autofunction:: permute_responses 
     43 
     44 
     45======== 
     46Examples 
     47======== 
     48 
     49To predict values of the response for the first five instances 
     50use the code 
     51 
     52.. literalinclude:: code/lasso-example.py 
     53   :lines: 14,15 
     54 
     55Output 
     56 
     57:: 
     58 
     59    Actual: 24.00, predicted: 24.58  
     60    Actual: 21.60, predicted: 23.30  
     61    Actual: 34.70, predicted: 24.98  
     62    Actual: 33.40, predicted: 24.78  
     63    Actual: 36.20, predicted: 24.66  
     64 
     65To see the fitted regression coefficients, print the model 
     66 
     67.. literalinclude:: code/lasso-example.py 
     68   :lines: 17 
     69 
     70The output 
     71 
     72:: 
     73 
     74    Variable  Coeff Est  Std Error          p 
     75     Intercept     22.533 
     76          CRIM     -0.000      0.023      0.480       
     77         INDUS     -0.010      0.023      0.300       
     78            RM      1.303      0.994      0.000   *** 
     79           AGE     -0.002      0.000      0.320       
     80       PTRATIO     -0.191      0.209      0.050     . 
     81         LSTAT     -0.126      0.105      0.000   *** 
     82    Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1 
     83 
     84 
     85    For 7 variables the regression coefficient equals 0:  
     86    ZN 
     87    CHAS 
     88    NOX 
     89    DIS 
     90    RAD 
     91    TAX 
     92    B 
     93 
     94shows that some of the regression coefficients are equal to 0.     
     95 
  • docs/reference/rst/Orange.regression.rst

    r10396 r10537  
    33########################### 
    44 
    5 Orange uses the term `classification` to also denote the 
    6 regression. For instance, the dependent variable is called a `class 
    7 variable` even when it is continuous, and models are generally called 
    8 classifiers. A part of the reason is that classification and 
    9 regression rely on the same set of basic classes. 
    10  
    11 Please see the documentation on :doc:`Orange.classification` for 
    12 information on how to fit models in general. 
    13  
    14 Orange contains a number of regression models which are listed below. 
     5Orange implements a set of methods for regression modeling, that is, 
     6where the outcome - dependent variable is real-valued: 
    157 
    168.. toctree:: 
    179   :maxdepth: 1 
    1810 
    19    Orange.regression.mean 
    2011   Orange.regression.linear 
    2112   Orange.regression.lasso 
     
    2314   Orange.regression.earth 
    2415   Orange.regression.tree 
     16   Orange.regression.mean 
     17 
     18Notice that the dependent variable is in this documentation and in the 
     19implementation referred to as `class variable`. See also the documentation 
     20on :doc:`Orange.classification` for information on how to fit models 
     21and use them for prediction. 
     22 
     23************************* 
     24Base class for regression 
     25************************* 
     26 
     27All regression learners are inherited from `BaseRegressionLearner`. 
    2528 
    2629.. automodule:: Orange.regression.base 
  • docs/reference/rst/code/unusedValues.py

    r10149 r10547  
    22data = Orange.data.Table("unusedValues.tab") 
    33 
    4 new_variables = [Orange.preprocess.RemoveUnusedValues(var, data) for var in data.domain.variables] 
     4new_variables = [Orange.data.preprocess.RemoveUnusedValues(var, data) for var in data.domain.variables] 
    55 
    66print 
  • docs/reference/rst/index.rst

    r10246 r10554  
    3434   Orange.optimization 
    3535    
    36    Orange.preprocess 
    37  
    3836   Orange.projection 
    3937 
  • install-scripts/mac/dailyrun-finkonly.sh

    r8096 r10556  
    1 #!/bin/bash 
     1#!/bin/bash -e 
    22# 
    3 # Should be run as: sudo ./dailyrun-finkonly.sh 
    4 # 
     3# $1 workdir 
     4# $2 force 
     5# $3 local 
     6 
     7WORK_DIR=${1:-"/private/tmp"} 
     8FORCE=$2 
     9LOCAL=$3 
    510 
    611test -r /sw/bin/init.sh && . /sw/bin/init.sh 
     12 
     13export PATH=$HOME/bin:$PATH 
     14 
     15if [ $LOCAL ]; then 
     16    PUBLISH_DIR=$WORK_DIR/download 
     17    LOG_DIR=$WORK_DIR/logs 
     18    mkdir -p $PUBLISH_DIR 
     19    mkdir -p $LOG_DIR 
     20else 
     21    PUBLISH_DIR=/Volumes/download 
     22    LOG_DIR=/Volumes/download/buildLogs/osx 
     23fi 
     24 
     25if [ ! -e $WORK_DIR ]; then 
     26    mkdir -p $WORK_DIR 
     27fi 
     28 
     29SOURCES_DIR=$PUBLISH_DIR/sources 
     30 
     31# Get versions from PKG-INFO files (these are updated by dailyrun-sources.sh) 
     32ORANGE_VERSION=`grep "^Version:" $SOURCES_DIR/Orange.egg-info/PKG-INFO | cut -d " " -f 2` 
     33BIOINFORMATICS_VERSION=`grep "^Version:" $SOURCES_DIR/Orange_Bioinformatics.egg-info/PKG-INFO | cut -d " " -f 2` 
     34TEXT_VERSION=`grep "^Version:" $SOURCES_DIR/Orange_Text_Mining.egg-info/PKG-INFO | cut -d " " -f 2` 
     35 
     36 
     37# Source filenames 
     38ORANGE_SOURCE="Orange-${ORANGE_VERSION}.tar.gz" 
     39BIOINFORMATICS_SOURCE="Orange-Bioinformatics-${BIOINFORMATICS_VERSION}.tar.gz" 
     40TEXT_SOURCE="Orange-Text-Mining-${TEXT_VERSION}.tar.gz" 
     41 
     42 
     43# Get source packages md5 checksum 
     44ORANGE_SOURCE_MD5=`md5 -q $SOURCES_DIR/$ORANGE_SOURCE` 
     45BIOINFORMATICS_SOURCE_MD5=`md5 -q $SOURCES_DIR/$BIOINFORMATICS_SOURCE` 
     46TEXT_SOURCE_MD5=`md5 -q $SOURCES_DIR/$TEXT_SOURCE` 
    747 
    848MAC_VERSION=`sw_vers -productVersion | cut -d '.' -f 2` 
    949ARCH=`perl -MFink::FinkVersion -e 'print Fink::FinkVersion::get_arch'` 
    1050 
    11 defaults write com.apple.desktopservices DSDontWriteNetworkStores true 
    1251 
    13 /Users/ailabc/mount-dirs.sh || { echo "Mounting failed." ; exit 1 ; } 
     52FINK_ROOT=/sw 
    1453 
    15 /Users/ailabc/fink-daily-build.sh &> /private/tmp/fink-daily-build.log 
    16 EXIT_VALUE=$? 
     54if [ ! $LOCAL ]; then 
     55    # Compare with the published info files 
     56    BASE="http://orange.biolab.si/fink/dists/$ARCH/main/finkinfo" 
     57else 
     58    # Compare with the local info files 
     59    BASE="file://$FINK_ROOT/fink/dists/local/main/finkinfo" 
     60fi 
    1761 
    18 /Users/ailabc/mount-dirs.sh || { echo "Mounting failed." ; exit 1 ; } 
     62OLD_ORANGE_VERSION=`curl --silent $BASE/orange-gui-hg-py.info | grep "Version: " | cut -d" " -f 2` 
     63OLD_BIOINFORMATICS_VERSION=`curl --silent $BASE/orange-bioinformatics-gui-hg-py.info | grep "Version: " | cut -d" " -f 2` 
     64OLD_TEXT_VERSION=`curl --silent $BASE/orange-text-gui-hg-py.info | grep "Version: " | cut -d" " -f 2` 
    1965 
    20 echo "Orange (fink $MAC_VERSION $ARCH) [$EXIT_VALUE]" > "/Volumes/download/buildLogs/osx/fink-$MAC_VERSION-$ARCH-daily-build.log" 
    21 date >> "/Volumes/download/buildLogs/osx/fink-$MAC_VERSION-$ARCH-daily-build.log" 
    22 cat /private/tmp/fink-daily-build.log >> "/Volumes/download/buildLogs/osx/fink-$MAC_VERSION-$ARCH-daily-build.log" 
    23 (($EXIT_VALUE)) && echo "Running fink-daily-build.sh failed" 
     66if [[ $OLD_ORANGE_VERSION < ORANGE_VERSION ]]; then 
     67    NEW_ORANGE=1 
     68fi 
     69 
     70if [[ $OLD_BIOINFORMATICS_VERSION < BIOINFORMATICS_VERSION ]]; then 
     71    NEW_BIOINFORMATICS=1 
     72fi 
     73 
     74if [[ $OLD_TEXT_VERSION < TEXT_VERSION ]]; then 
     75    NEW_TEXT=1 
     76fi 
     77 
     78# Base url for sources in fink .info files 
     79if [ $LOCAL ]; then 
     80    BASE_URL="file://$PUBLISH_DIR/sources" 
     81else 
     82    BASE_URL="http://orange.biolab.si/download/sources" 
     83fi 
     84 
     85# Update the local finkinfo  
     86# Local info files will be moved to biolab/main/finkinfo in fink-daily-build-packages.sh 
     87FINK_INFO_DIR="$FINK_ROOT/fink/dists/local/main/finkinfo" 
     88 
     89if [ ! -e $FINK_INFO_DIR ]; then 
     90    mkdir -p $FINK_INFO_DIR 
     91fi 
     92 
     93# Directory where fink .info templates are 
     94FINK_TEMPLATES=$WORK_DIR/repos/orange/install-scripts/mac/fink 
     95 
     96FINK_LOG=$WORK_DIR/fink-daily-build.log 
     97 
     98echo "" > $FINK_LOG 
     99 
     100if [[ $NEW_ORANGE || $FORCE ]]; then 
     101    FINK_ORANGE_SOURCE_TEMPLATE="Orange-%v.tar.gz" 
     102    ./fink-register-info.sh "$FINK_TEMPLATES/orange-gui-hg-py.info" $BASE_URL/$FINK_ORANGE_SOURCE_TEMPLATE $ORANGE_SOURCE_MD5 $ORANGE_VERSION $FINK_INFO_DIR/orange-gui-hg-py.info >> $FINK_LOG 2>&1 
     103fi 
     104 
     105if [[ $NEW_BIOINFORMATICS || $FORCE ]]; then 
     106    FINK_BIOINFORMATICS_SOURCE_TEMPLATE="Orange-Bioinformatics-%v.tar.gz" 
     107    ./fink-register-info.sh "$FINK_TEMPLATES/orange-bioinformatics-gui-hg-py.info" $BASE_URL/$FINK_BIOINFORMATICS_SOURCE_TEMPLATE $BIOINFORMATICS_SOURCE_MD5 $BIOINFORMATICS_VERSION $FINK_INFO_DIR/orange-bioinformatics-gui-hg-py.info >> $FINK_LOG 2>&1 
     108fi 
     109 
     110if [[ $NEW_TEXT || $FORCE ]]; then 
     111    FINK_TEXT_SOURCE_TEMPLATE="Orange-Text-Mining-%v.tar.gz" 
     112    ./fink-register-info.sh "$FINK_TEMPLATES/orange-text-gui-hg-py.info" $BASE_URL/$FINK_TEXT_SOURCE_TEMPLATE $TEXT_SOURCE_MD5 $TEXT_VERSION $FINK_INFO_DIR/orange-text-gui-hg-py.info >> $FINK_LOG 2>&1 
     113fi 
     114 
     115if [ ! $LOCAL ]; then 
     116    /Users/ailabc/mount-dirs.sh || { echo "Mounting failed." ; exit 1 ; } 
     117fi 
     118 
     119 
     120## daily fink build 
     121if [ ! $LOCAL ]; then 
     122    ./fink-daily-build-packages.sh &> $WORK_DIR/fink-daily-build-packages.log 
     123    EXIT_VALUE=$? 
     124fi 
     125 
     126if [ ! $LOCAL ]; then 
     127    /Users/ailabc/mount-dirs.sh || { echo "Mounting failed." ; exit 1 ; } 
     128fi 
     129 
     130echo "Orange (fink $MAC_VERSION $ARCH) [$EXIT_VALUE]" > "$LOG_DIR/fink-$MAC_VERSION-$ARCH-daily-build.log" 
     131date >> "$LOG_DIR/fink-$MAC_VERSION-$ARCH-daily-build.log" 
     132cat $WORK_DIR/fink-daily-build-packages.log >> "$LOG_DIR/fink-$MAC_VERSION-$ARCH-daily-build.log" 
     133(($EXIT_VALUE)) && echo "Running fink-daily-build-packages.sh failed" 
    24134 
    25135# Zero exit value 
  • install-scripts/mac/dailyrun.sh

    r10527 r10563  
    33# Should be run as: sudo ./dailyrun.sh 
    44# 
     5# $1 workdir 
     6# $2 force 
     7# $3 local 
    58 
    6 #FORCE=true 
    7 #LOCAL=true 
     9WORK_DIR=${1:-"/private/tmp"} 
     10FORCE=$2 
     11LOCAL=$3 
     12 
     13MAC_VERSION=`sw_vers -productVersion | cut -d '.' -f 2` 
    814 
    915test -r /sw/bin/init.sh && . /sw/bin/init.sh 
     
    1117export PATH=$HOME/bin:$PATH 
    1218 
    13 WORK_DIR=/private/tmp/repos 
    14  
    1519if [ $LOCAL ]; then 
    16     PUBLISH_DIR=/private/tmp/download 
     20    PUBLISH_DIR=$WORK_DIR/download 
     21    LOG_DIR=$WORK_DIR/logs 
    1722    mkdir -p $PUBLISH_DIR 
     23    mkdir -p $LOG_DIR 
    1824else 
    1925    PUBLISH_DIR=/Volumes/download 
     26    LOG_DIR=/Volumes/download/buildLogs/osx 
    2027fi 
    2128 
     
    2431fi 
    2532 
    26 SOURCE_LOG=/private/tmp/sources-daily-build.log 
    2733 
    28 # Build source packages 
    29 ./build-source.sh https://bitbucket.org/biolab/orange orange tip $WORK_DIR Orange > $SOURCE_LOG 2>&1 
    30 EXIT_VALUE1=$? 
    31 ./build-source.sh https://bitbucket.org/biolab/orange-addon-bioinformatics bioinformatics tip $WORK_DIR Orange-Bioinformatics >> $SOURCE_LOG 2>&1 
    32 EXIT_VALUE2=$? 
    33 ./build-source.sh https://bitbucket.org/biolab/orange-addon-text text tip $WORK_DIR Orange-Text-Mining >> $SOURCE_LOG 2>&1 
    34 EXIT_VALUE3=$? 
     34SOURCE_LOG=$WORK_DIR/sources-daily-build.log 
    3535 
    36 echo "Orange (sources) [$EXIT_VALUE1 $EXIT_VALUE2 $EXIT_VALUE3]" > "/Volumes/download/buildLogs/osx/source-daily-build-hg.log" 
    37 date >> "/Volumes/download/buildLogs/osx/source-daily-build-hg.log" 
    38 cat $SOURCE_LOG > "/Volumes/download/buildLogs/osx/source-daily-build-hg.log" 
    39 (($EXIT_VALUE1 + $EXIT_VALUE2 + $EXIT_VALUE3)) && echo "Daily sources failed" 
     36./dailyrun-sources.sh $WORK_DIR $FORCE $LOCAL &> $SOURCE_LOG 
     37EXIT_VALUE=$? 
    4038 
    41 # Get versions from PKG-INFO files 
    42 ORANGE_VERSION=`grep "^Version:" $WORK_DIR/Orange.egg-info/PKG-INFO | cut -d " " -f 2` 
    43 BIOINFORMATICS_VERSION=`grep "^Version:" $WORK_DIR/Orange_Bioinformatics.egg-info/PKG-INFO | cut -d " " -f 2` 
    44 TEXT_VERSION=`grep "^Version:" $WORK_DIR/Orange_Text_Mining.egg-info/PKG-INFO | cut -d " " -f 2` 
     39defaults write com.apple.desktopservices DSDontWriteNetworkStores true 
     40 
     41if [ ! $LOCAL ]; then 
     42    /Users/ailabc/mount-dirs.sh || { echo "Mounting failed." ; exit 1 ; } 
     43fi 
     44 
     45echo "Orange (sources) [ $EXIT_VALUE ]" > "$LOG_DIR/source-daily-build-hg.log" 
     46date >> "$LOG_DIR/source-daily-build-hg.log" 
     47cat $SOURCE_LOG >> "$LOG_DIR/source-daily-build-hg.log" 
     48(($EXIT_VALUE)) && echo "Daily sources failed" 
     49 
     50SOURCES_DIR=$PUBLISH_DIR/sources 
     51 
     52# Get versions from PKG-INFO files (these are updated by dailyrun-sources) 
     53ORANGE_VERSION=`grep "^Version:" $SOURCES_DIR/Orange.egg-info/PKG-INFO | cut -d " " -f 2` 
     54BIOINFORMATICS_VERSION=`grep "^Version:" $SOURCES_DIR/Orange_Bioinformatics.egg-info/PKG-INFO | cut -d " " -f 2` 
     55TEXT_VERSION=`grep "^Version:" $SOURCES_DIR/Orange_Text_Mining.egg-info/PKG-INFO | cut -d " " -f 2` 
    4556 
    4657 
     
    5263 
    5364# Get source packages md5 checksum 
    54 ORANGE_SOURCE_MD5=`md5 -q $WORK_DIR/$ORANGE_SOURCE` 
    55 BIOINFORMATICS_SOURCE_MD5=`md5 -q $WORK_DIR/$BIOINFORMATICS_SOURCE` 
    56 TEXT_SOURCE_MD5=`md5 -q $WORK_DIR/$TEXT_SOURCE` 
     65ORANGE_SOURCE_MD5=`md5 -q $SOURCES_DIR/$ORANGE_SOURCE` 
     66BIOINFORMATICS_SOURCE_MD5=`md5 -q $SOURCES_DIR/$BIOINFORMATICS_SOURCE` 
     67TEXT_SOURCE_MD5=`md5 -q $SOURCES_DIR/$TEXT_SOURCE` 
     68 
     69## Daily bundle build from hg (for now always until versioning is established). 
     70if [[ true || $NEW_ORANGE || $NEW_BIOINFORMATICS || $NEW_TEXT || $FORCE ]]; then 
     71    /Users/ailabc/bundle-daily-build-hg.sh &> $WORK_DIR/bundle-daily-build.log 
     72    EXIT_VALUE=$? 
     73fi 
     74 
     75if [ ! $LOCAL ]; then 
     76    /Users/ailabc/mount-dirs.sh || { echo "Mounting failed." ; exit 1 ; } 
     77fi 
     78 
     79echo "Orange (bundle $MAC_VERSION from hg) [$EXIT_VALUE]" > "$LOG_DIR/bundle-$MAC_VERSION-daily-build-hg.log" 
     80date >> "$LOG_DIR/bundle-$MAC_VERSION-daily-build-hg.log" 
     81cat $WORK_DIR/bundle-daily-build.log >> "$LOG_DIR/bundle-$MAC_VERSION-daily-build-hg.log" 
     82(($EXIT_VALUE)) && echo "Running bundle-daily-build-hg.sh failed" 
    5783 
    5884 
     
    6086ARCH=`perl -MFink::FinkVersion -e 'print Fink::FinkVersion::get_arch'` 
    6187 
    62 defaults write com.apple.desktopservices DSDontWriteNetworkStores true 
     88FINK_ROOT=/sw 
    6389 
    6490if [ ! $LOCAL ]; then 
    65     /Users/ailabc/mount-dirs.sh || { echo "Mounting failed." ; exit 1 ; } 
     91    # Compare with the published info files 
     92    BASE="http://orange.biolab.si/fink/dists/$ARCH/main/finkinfo" 
     93else 
     94    # Compare with the local info files 
     95    BASE="file://$FINK_ROOT/fink/dists/local/main/finkinfo" 
    6696fi 
    6797 
    68 # Base url for sources 
     98 
     99OLD_ORANGE_VERSION=`curl --silent $BASE/orange-gui-hg-py.info | grep "Version: " | cut -d" " -f 2` 
     100OLD_BIOINFORMATICS_VERSION=`curl --silent $BASE/orange-bioinformatics-gui-hg-py.info | grep "Version: " | cut -d" " -f 2` 
     101OLD_TEXT_VERSION=`curl --silent $BASE/orange-text-gui-hg-py.info | grep "Version: " | cut -d" " -f 2` 
     102 
     103if [[ $OLD_ORANGE_VERSION < ORANGE_VERSION ]]; then 
     104    NEW_ORANGE=1 
     105fi 
     106 
     107if [[ $OLD_BIOINFORMATICS_VERSION < BIOINFORMATICS_VERSION ]]; then 
     108    NEW_BIOINFORMATICS=1 
     109fi 
     110 
     111if [[ $OLD_TEXT_VERSION < TEXT_VERSION ]]; then 
     112    NEW_TEXT=1 
     113fi 
     114 
     115 
     116# Base url for sources in fink .info files 
    69117if [ $LOCAL ]; then 
    70118    BASE_URL="file://$PUBLISH_DIR/sources" 
     
    73121fi 
    74122 
    75 # Base dir for sources 
    76 SOURCES_DIR=$PUBLISH_DIR/sources 
    77  
    78  
    79 # Publish sources 
    80  
    81 if [ ! -e $SOURCES_DIR ]; then 
    82     mkdir -p $SOURCES_DIR 
    83 fi 
    84  
    85 if [[ ! -e $SOURCES_DIR/$ORANGE_SOURCE || $FORCE ]]; then 
    86     cp $WORK_DIR/$ORANGE_SOURCE $SOURCES_DIR/$ORANGE_SOURCE 
    87     NEW_ORANGE=1 
    88 fi 
    89  
    90 if [[ ! -e $SOURCES_DIR/BIOINFORMATICS_SOURCE || $FORCE ]]; then 
    91     cp $WORK_DIR/$BIOINFORMATICS_SOURCE $SOURCES_DIR/$BIOINFORMATICS_SOURCE 
    92     NEW_BIOINFORMATICS=1 
    93 fi 
    94  
    95 if [[ ! -e $SOURCES_DIR/TEXT_SOURCE || $FORCE ]]; then 
    96     cp $WORK_DIR/$TEXT_SOURCE $SOURCES_DIR/$TEXT_SOURCE 
    97     NEW_TEXT=1 
    98 fi 
    99  
    100 FINK_ROOT=/sw 
    101  
    102123# Update the local finkinfo  
    103 # Local info files will be copied to biolab/main/finkinfo in fink-daily-build-packages.sh 
     124# Local info files will be moved to biolab/main/finkinfo in fink-daily-build-packages.sh 
    104125FINK_INFO_DIR="$FINK_ROOT/fink/dists/local/main/finkinfo" 
    105126 
     
    111132FINK_TEMPLATES=$WORK_DIR/orange/install-scripts/mac/fink 
    112133 
    113 FINK_LOG=/private/tmp/bundle-daily-build.log 
     134FINK_LOG=$WORK_DIR/fink-daily-build.log 
    114135echo "" > $FINK_LOG 
    115136 
     
    134155 
    135156 
    136 ## Daily bundle build from hg 
    137 if [[ $NEW_ORANGE || $NEW_BIOINFORMATICS || $NEW_TEXT || $FORCE ]]; then 
    138     /Users/ailabc/bundle-daily-build-hg.sh &> /private/tmp/bundle-daily-build.log 
    139     EXIT_VALUE=$? 
    140 fi 
    141  
    142 if [ ! $LOCAL ]; then 
    143     /Users/ailabc/mount-dirs.sh || { echo "Mounting failed." ; exit 1 ; } 
    144 fi 
    145  
    146 echo "Orange (bundle $MAC_VERSION from hg) [$EXIT_VALUE]" > "/Volumes/download/buildLogs/osx/bundle-$MAC_VERSION-daily-build-hg.log" 
    147 date >> "/Volumes/download/buildLogs/osx/bundle-$MAC_VERSION-daily-build-hg.log" 
    148 cat /private/tmp/bundle-daily-build.log >> "/Volumes/download/buildLogs/osx/bundle-$MAC_VERSION-daily-build-hg.log" 
    149 (($EXIT_VALUE)) && echo "Running bundle-daily-build-hg.sh failed" 
    150  
    151  
    152157## daily fink build 
    153158 
    154 /Users/ailabc/fink-daily-build-packages.sh &> /private/tmp/fink-daily-build-packages.log 
     159./fink-daily-build-packages.sh &> $WORK_DIR/fink-daily-build-packages.log 
    155160EXIT_VALUE=$? 
    156161 
     
    159164fi 
    160165 
    161 echo "Orange (fink $MAC_VERSION $ARCH) [$EXIT_VALUE]" > "/Volumes/download/buildLogs/osx/fink-$MAC_VERSION-$ARCH-daily-build.log" 
    162 date >> "/Volumes/download/buildLogs/osx/fink-$MAC_VERSION-$ARCH-daily-build.log" 
    163 cat /private/tmp/fink-daily-build-packages.log >> "/Volumes/download/buildLogs/osx/fink-$MAC_VERSION-$ARCH-daily-build.log" 
     166echo "Orange (fink $MAC_VERSION $ARCH) [$EXIT_VALUE]" > "$LOG_DIR/fink-$MAC_VERSION-$ARCH-daily-build.log" 
     167date >> "$LOG_DIR/fink-$MAC_VERSION-$ARCH-daily-build.log" 
     168cat $WORK_DIR/fink-daily-build-packages.log >> "$LOG_DIR/fink-$MAC_VERSION-$ARCH-daily-build.log" 
    164169(($EXIT_VALUE)) && echo "Running fink-daily-build.sh failed" 
    165170 
  • install-scripts/mac/fink-daily-build-packages.sh

    • Property exe set to *
    r10500 r10556  
    7575rm -f $FINK_ROOT/fink/dists/biolab/main/finkinfo/all.tgz 
    7676 
    77 # Copy info files from local/main/finkinfo 
     77# Move info files from local/main/finkinfo (put there by dailyru[-finkonly].sh 
    7878echo "Updating new fink info files." 
    7979mv $FINK_ROOT/fink/dists/local/main/finkinfo/*.info $FINK_ROOT/fink/dists/biolab/main/finkinfo/ 
  • install-scripts/mac/update-all-scripts.sh

    r10505 r10562  
    2525curl --silent --output fink-register-info.sh https://bitbucket.org/biolab/orange/raw/tip/install-scripts/mac/fink-register-info.sh 
    2626curl --silent --output build-source.sh https://bitbucket.org/biolab/orange/raw/tip/install-scripts/mac/build-source.sh 
     27curl --silent --output dailyrun-sources.sh https://bitbucket.org/biolab/orange/raw/tip/install-scripts/mac/dailyrun-sources.sh 
    2728 
    2829chmod +x *.sh 
  • source/orange/lib_kernel.cpp

    r10482 r10568  
    17801780  return Py_BuildValue("O(Os#ii)N", getExportedFunction("__pickleLoaderRandomGenerator"), 
    17811781                                    self->ob_type, 
    1782                                     (char *)(mt.state), (mt.next-mt.state + mt.left + 1) * sizeof(long), 
     1782                                    (char *)(mt.state), 625 * sizeof(long), 
    17831783                                    mt.next - mt.state, 
    17841784                                    mt.left, 
Note: See TracChangeset for help on using the changeset viewer.