Changeset 7288:f06b1a7df69b in orange


Ignore:
Timestamp:
02/03/11 00:36:34 (3 years ago)
Author:
matija <matija.polajnar@…>
Branch:
default
Convert:
1915fa63eba1fe59c29905677b30ee9b1af3e806
Message:

Refactoring to CamelCase.

Location:
orange
Files:
3 edited

Legend:

Unmodified
Added
Removed
  • orange/Orange/classification/rules.py

    r7275 r7288  
    8484    RuleClassifier_logit, \ 
    8585    RuleLearner, \ 
    86     Rule, \ 
    8786    RuleBeamCandidateSelector, \ 
    8887    RuleBeamCandidateSelector_TakeAll, \ 
     
    151150        else: return (pTruePositive-pClass)/max(pRule,1e-6) 
    152151 
     152class Rule(Orange.core.Rule): 
     153    """ 
     154    Wrapper around a native Rule class to add print method. 
     155    """ 
     156     
     157    def __init__(self, nativeBayesClassifier): 
     158        self.nativeBayesClassifier = nativeBayesClassifier 
     159        for k, v in self.nativeBayesClassifier.__dict__.items(): 
     160            self.__dict__[k] = v 
     161   
     162    def __call__(self, instance, result_type=Orange.core.Classifier.GetValue, 
     163                 *args, **kwdargs): 
     164        """Classify a new instance 
     165         
     166        :param instance: instance to be classified 
     167        :type instance: :class:`Orange.data.Instance` 
     168        :param result_type: :class:`Orange.core.Classifier.GetValue` or \ 
     169              :class:`Orange.core.Classifier.GetProbabilities` or 
     170              :class:`Orange.core.Classifier.GetBoth` 
     171         
     172        :rtype: :class:`Orange.data.Value`,  
     173              :class:`Orange.statistics.Distribution` or a tuple with both 
     174        """ 
     175        self.nativeBayesClassifier(instance, *args, **kwdargs) 
     176 
     177    def __setattr__(self, name, value): 
     178        if name == "nativeBayesClassifier": 
     179            self.__dict__[name] = value 
     180            return 
     181        if name in self.nativeBayesClassifier.__dict__: 
     182            self.nativeBayesClassifier.__dict__[name] = value 
     183        self.__dict__[name] = value 
     184     
     185    def p(self, class_, instance): 
     186        """Return probability of single class 
     187        Probability is not normalized and can be different from probability 
     188        returned from __call__ 
     189         
     190        :param class_: 
     191        :type class_: 
     192        :param instance: instance to be classified 
     193        :type instance: :class:`Orange.data.Instance` 
     194         
     195        """ 
     196        return self.nativeBayesClassifier.p(class_, instance) 
     197     
     198    def printModel(self): 
     199        """Print classifier in human friendly format""" 
     200        nValues=len(self.classVar.values) 
     201        frmtStr=' %10.3f'*nValues 
     202        classes=" "*20+ ((' %10s'*nValues) % tuple([i[:10] for i in self.classVar.values])) 
     203        print classes 
     204        print "class probabilities "+(frmtStr % tuple(self.distribution)) 
     205        print 
     206     
     207        for i in self.conditionalDistributions: 
     208            print "Attribute", i.variable.name 
     209            print classes 
     210            for v in range(len(i.variable.values)): 
     211                print ("%20s" % i.variable.values[v][:20]) + (frmtStr % tuple(i[v])) 
     212            print 
    153213 
    154214class CN2Learner(RuleLearner): 
     
    289349        self.ruleFinder.validator = RuleValidator_LRS(alpha = alpha) 
    290350        self.ruleFinder.ruleStoppingValidator = RuleValidator_LRS(alpha = 1.0) 
    291         self.ruleStopping = RuleStopping_apriori() 
     351        self.ruleStopping = RuleStopping_Apriori() 
    292352        self.dataStopping = RuleDataStoppingCriteria_NoPositives() 
    293353         
     
    430490        CN2UnorderedLearnerClass.__init__(self, evaluator = evaluator, 
    431491                                          beamWidth = beamWidth, alpha = alpha, **kwds) 
    432         self.coverAndRemove = CovererAndRemover_multWeights(mult=mult) 
     492        self.coverAndRemove = CovererAndRemover_MultWeights(mult=mult) 
    433493 
    434494    def __call__(self, instances, weight=0):         
     
    442502 
    443503 
    444 def ruleToString(rule, showDistribution = True): 
    445     def selectSign(oper): 
    446         if oper == Orange.core.ValueFilter_continuous.Less: 
    447             return "<" 
    448         elif oper == Orange.core.ValueFilter_continuous.LessEqual: 
    449             return "<=" 
    450         elif oper == Orange.core.ValueFilter_continuous.Greater: 
    451             return ">" 
    452         elif oper == Orange.core.ValueFilter_continuous.GreaterEqual: 
    453             return ">=" 
    454         else: return "=" 
    455  
    456     if not rule: 
    457         return "None" 
    458     conds = rule.filter.conditions 
    459     domain = rule.filter.domain 
    460      
    461     ret = "IF " 
    462     if len(conds)==0: 
    463         ret = ret + "TRUE" 
    464  
    465     for i,c in enumerate(conds): 
    466         if i > 0: 
    467             ret += " AND " 
    468         if type(c) == Orange.core.ValueFilter_discrete: 
    469             ret += domain[c.position].name + "=" + str([domain[c.position].values[int(v)] for v in c.values]) 
    470         elif type(c) == Orange.core.ValueFilter_continuous: 
    471             ret += domain[c.position].name + selectSign(c.oper) + str(c.ref) 
    472     if rule.classifier and type(rule.classifier) == Orange.core.DefaultClassifier and rule.classifier.defaultVal: 
    473         ret = ret + " THEN "+domain.classVar.name+"="+\ 
    474         str(rule.classifier.defaultValue) 
    475         if showDistribution: 
    476             ret += str(rule.classDistribution) 
    477     elif rule.classifier and type(rule.classifier) == Orange.core.DefaultClassifier and type(domain.classVar) == Orange.core.EnumVariable: 
    478         ret = ret + " THEN "+domain.classVar.name+"="+\ 
    479         str(rule.classDistribution.modus()) 
    480         if showDistribution: 
    481             ret += str(rule.classDistribution) 
    482     return ret         
    483  
    484  
    485 class mEstimate(RuleEvaluator): 
     504class MEstimate(RuleEvaluator): 
     505    """ 
     506    Rule evaluator using m-estimate of probability rule evaluation function. 
     507    :param m: m-value for m-estimate 
     508    :type m: integer 
     509     
     510    """ 
    486511    def __init__(self, m=2): 
    487512        self.m = m 
     
    501526        return p 
    502527 
    503 class RuleStopping_apriori(RuleStoppingCriteria): 
     528class RuleStopping_Apriori(RuleStoppingCriteria): 
    504529    def __init__(self, apriori=None): 
    505530        self.apriori =  None 
     
    525550            return len(rule.filter.conditions) <= self.length 
    526551        return True     
    527      
    528  
    529 def supervisedClassCheck(instances): 
    530     if not instances.domain.classVar: 
    531         raise Exception("Class variable is required!") 
    532     if instances.domain.classVar.varType == Orange.core.VarTypes.Continuous: 
    533         raise Exception("CN2 requires a discrete class!") 
    534      
    535  
    536  
    537  
    538  
    539 class RuleClassifier_bestRule(RuleClassifier): 
     552 
     553 
     554class NoDuplicatesValidator(RuleValidator): 
     555    def __init__(self,alpha=.05,min_coverage=0,max_rule_length=0,rules=RuleList()): 
     556        self.rules = rules 
     557        self.validator = RuleValidator_LRS(alpha=alpha,min_coverage=min_coverage,max_rule_length=max_rule_length) 
     558         
     559    def __call__(self, rule, data, weightID, targetClass, apriori): 
     560        if rule_in_set(rule,self.rules): 
     561            return False 
     562        return bool(self.validator(rule,data,weightID,targetClass,apriori)) 
     563                 
     564class RuleStopping_SetRules(RuleStoppingCriteria): 
     565    def __init__(self,validator): 
     566        self.ruleStopping = RuleStoppingCriteria_NegativeDistribution() 
     567        self.validator = validator 
     568 
     569    def __call__(self,rules,rule,instances,data):         
     570        ru_st = self.ruleStopping(rules,rule,instances,data) 
     571        if not ru_st: 
     572            self.validator.rules.append(rule) 
     573        return bool(ru_st) 
     574 
     575 
     576class RuleClassifier_BestRule(RuleClassifier): 
    540577    def __init__(self, rules, instances, weightID = 0, **argkw): 
    541578        self.rules = rules 
     
    576613        return retStr     
    577614 
    578 class CovererAndRemover_multWeights(RuleCovererAndRemover): 
     615class CovererAndRemover_MultWeights(RuleCovererAndRemover): 
    579616    def __init__(self, mult = 0.7): 
    580617        self.mult = mult 
     
    594631        return (instances,newWeightsID) 
    595632 
    596 class CovererAndRemover_addWeights(RuleCovererAndRemover): 
     633class CovererAndRemover_AddWeights(RuleCovererAndRemover): 
    597634    def __call__(self, rule, instances, weights, targetClass): 
    598635        if not weights: 
     
    620657        return (instances,newWeightsID) 
    621658 
    622 def rule_in_set(rule,rules): 
    623     for r in rules: 
    624         if rules_equal(rule,r): 
    625             return True 
    626     return False 
    627  
    628 def rules_equal(rule1,rule2): 
    629     if not len(rule1.filter.conditions)==len(rule2.filter.conditions): 
    630         return False 
    631     for c1 in rule1.filter.conditions: 
    632         found=False # find the same condition in the other rule 
    633         for c2 in rule2.filter.conditions: 
    634             try: 
    635                 if not c1.position == c2.position: continue # same attribute? 
    636                 if not type(c1) == type(c2): continue # same type of condition 
    637                 if type(c1) == Orange.core.ValueFilter_discrete: 
    638                     if not type(c1.values[0]) == type(c2.values[0]): continue 
    639                     if not c1.values[0] == c2.values[0]: continue # same value? 
    640                 if type(c1) == Orange.core.ValueFilter_continuous: 
    641                     if not c1.oper == c2.oper: continue # same operator? 
    642                     if not c1.ref == c2.ref: continue #same threshold? 
    643                 found=True 
    644                 break 
    645             except: 
    646                 pass 
    647         if not found: 
    648             return False 
    649     return True 
    650  
    651 class noDuplicates_validator(RuleValidator): 
    652     def __init__(self,alpha=.05,min_coverage=0,max_rule_length=0,rules=RuleList()): 
    653         self.rules = rules 
    654         self.validator = RuleValidator_LRS(alpha=alpha,min_coverage=min_coverage,max_rule_length=max_rule_length) 
    655          
    656     def __call__(self, rule, data, weightID, targetClass, apriori): 
    657         if rule_in_set(rule,self.rules): 
    658             return False 
    659         return bool(self.validator(rule,data,weightID,targetClass,apriori)) 
    660                  
    661 class ruleSt_setRules(RuleStoppingCriteria): 
    662     def __init__(self,validator): 
    663         self.ruleStopping = RuleStoppingCriteria_NegativeDistribution() 
    664         self.validator = validator 
    665  
    666     def __call__(self,rules,rule,instances,data):         
    667         ru_st = self.ruleStopping(rules,rule,instances,data) 
    668         if not ru_st: 
    669             self.validator.rules.append(rule) 
    670         return bool(ru_st) 
    671      
    672  
    673 # Miscellaneous - utility functions 
    674 def avg(l): 
    675     if len(l)==0: 
    676         return 0. 
    677     return sum(l)/len(l) 
    678  
    679 def var(l): 
    680     if len(l)<2: 
    681         return 0. 
    682     av = avg(l) 
    683     vars=[math.pow(li-av,2) for li in l] 
    684     return sum(vars)/(len(l)-1) 
    685  
    686 def median(l): 
    687     if len(l)==0: 
    688         return 0.     
    689     l.sort() 
    690     le = len(l) 
    691     if le%2 == 1: 
    692         return l[(le-1)/2] 
    693     else: 
    694         return (l[le/2-1]+l[le/2])/2 
    695  
    696 def perc(l,p): 
    697     l.sort() 
    698     return l[int(math.floor(p*len(l)))] 
    699  
    700 def createRandomDataSet(data): 
    701     newData = Orange.data.Table(data) 
    702     # shuffle data 
    703     cl_num = newData.toNumeric("C") 
    704     random.shuffle(cl_num[0][:,0]) 
    705     clData = Orange.data.Table(Orange.data.Domain([newData.domain.classVar]),cl_num[0]) 
    706     for d_i,d in enumerate(newData): 
    707         d[newData.domain.classVar] = clData[d_i][newData.domain.classVar] 
    708     return newData 
    709  
    710 # estimated fisher tippett parameters for a set of values given in vals list (+ deciles) 
    711 def compParameters(vals,oldMi=0.5,oldBeta=1.1):                     
    712     # compute percentiles 
    713     vals.sort() 
    714     N = len(vals) 
    715     percs = [avg(vals[int(float(N)*i/10):int(float(N)*(i+1)/10)]) for i in range(10)]             
    716     if N<10: 
    717         return oldMi, oldBeta, percs 
    718     beta = math.sqrt(6*var(vals)/math.pow(math.pi,2)) 
    719     beta = min(2.0,max(oldBeta, beta)) 
    720     mi = max(oldMi, avg(vals) - 0.57721*beta) 
    721     return mi, beta, percs 
    722  
    723  
    724 def computeDists(data, weight=0, targetClass=0, N=100, learner=None): 
    725     """ Compute distributions of likelihood ratio statistics of extreme (best) rules.  """ 
    726     if not learner: 
    727         learner = createLearner() 
    728  
    729     ######################### 
    730     ## Learner preparation ## 
    731     ######################### 
    732     oldStopper = learner.ruleFinder.ruleStoppingValidator 
    733     evaluator = learner.ruleFinder.evaluator 
    734     learner.ruleFinder.evaluator = RuleEvaluator_LRS() 
    735     learner.ruleFinder.evaluator.storeRules = True 
    736     learner.ruleFinder.ruleStoppingValidator = RuleValidator_LRS(alpha=1.0) 
    737     learner.ruleFinder.ruleStoppingValidator.max_rule_complexity = 0   
    738  
    739     # loop through N (sampling repetitions) 
    740     maxVals = [] 
    741     for d_i in range(N): 
    742         # create data set (remove and randomize) 
    743         tempData = createRandomDataSet(data) 
    744         learner.ruleFinder.evaluator.rules = RuleList() 
    745         # Next, learn a rule 
    746         bestRule = learner.ruleFinder(tempData,weight,targetClass,RuleList()) 
    747         maxVals.append(bestRule.quality) 
    748     extremeDists=[compParameters(maxVals,1.0,1.0)] 
    749  
    750     ##################### 
    751     ## Restore learner ## 
    752     ##################### 
    753     learner.ruleFinder.evaluator = evaluator 
    754     learner.ruleFinder.ruleStoppingValidator = oldStopper 
    755     return extremeDists 
    756  
    757 def createEVDistList(evdList): 
    758     l = Orange.core.EVDistList() 
    759     for el in evdList: 
    760         l.append(Orange.core.EVDist(mu=el[0],beta=el[1],percentiles=el[2])) 
    761     return l 
    762659 
    763660class CovererAndRemover_Prob(RuleCovererAndRemover): 
     
    811708        # compute factor 
    812709        return (instances,weights) 
     710 
     711 
     712class CN2EVCUnorderedLearner(ABCN2): 
     713    """This is an implementation of CN2 + EVC as evaluation + LRC classification. 
     714        Main parameters: 
     715          -- ... 
     716    """ 
     717    def __init__(self, width=5, nsampling=100, rule_sig=1.0, att_sig=1.0, min_coverage = 1., max_rule_complexity = 5.): 
     718        ABCN2.__init__(self, width=width, nsampling=nsampling, rule_sig=rule_sig, att_sig=att_sig, 
     719                       min_coverage=int(min_coverage), max_rule_complexity = int(max_rule_complexity)) 
     720 
     721def ruleToString(rule, showDistribution = True): 
     722    """ 
     723    Write a string presentation of rule in human readable format. 
     724    :param rule: rule to pretty-print. 
     725    :type rule: :class:`Orange.classification.rules.Rules` 
     726    :param showDistribution: determines whether presentation should also 
     727        contain the distribution of covered instances 
     728    :type showDistribution: boolean 
     729     
     730    """ 
     731    def selectSign(oper): 
     732        if oper == Orange.core.ValueFilter_continuous.Less: 
     733            return "<" 
     734        elif oper == Orange.core.ValueFilter_continuous.LessEqual: 
     735            return "<=" 
     736        elif oper == Orange.core.ValueFilter_continuous.Greater: 
     737            return ">" 
     738        elif oper == Orange.core.ValueFilter_continuous.GreaterEqual: 
     739            return ">=" 
     740        else: return "=" 
     741 
     742    if not rule: 
     743        return "None" 
     744    conds = rule.filter.conditions 
     745    domain = rule.filter.domain 
     746     
     747    ret = "IF " 
     748    if len(conds)==0: 
     749        ret = ret + "TRUE" 
     750 
     751    for i,c in enumerate(conds): 
     752        if i > 0: 
     753            ret += " AND " 
     754        if type(c) == Orange.core.ValueFilter_discrete: 
     755            ret += domain[c.position].name + "=" + str([domain[c.position].values[int(v)] for v in c.values]) 
     756        elif type(c) == Orange.core.ValueFilter_continuous: 
     757            ret += domain[c.position].name + selectSign(c.oper) + str(c.ref) 
     758    if rule.classifier and type(rule.classifier) == Orange.core.DefaultClassifier and rule.classifier.defaultVal: 
     759        ret = ret + " THEN "+domain.classVar.name+"="+\ 
     760        str(rule.classifier.defaultValue) 
     761        if showDistribution: 
     762            ret += str(rule.classDistribution) 
     763    elif rule.classifier and type(rule.classifier) == Orange.core.DefaultClassifier and type(domain.classVar) == Orange.core.EnumVariable: 
     764        ret = ret + " THEN "+domain.classVar.name+"="+\ 
     765        str(rule.classDistribution.modus()) 
     766        if showDistribution: 
     767            ret += str(rule.classDistribution) 
     768    return ret         
     769 
     770def supervisedClassCheck(instances): 
     771    if not instances.domain.classVar: 
     772        raise Exception("Class variable is required!") 
     773    if instances.domain.classVar.varType == Orange.core.VarTypes.Continuous: 
     774        raise Exception("CN2 requires a discrete class!") 
     775     
     776 
     777 
     778 
     779 
     780 
     781def rule_in_set(rule,rules): 
     782    for r in rules: 
     783        if rules_equal(rule,r): 
     784            return True 
     785    return False 
     786 
     787def rules_equal(rule1,rule2): 
     788    if not len(rule1.filter.conditions)==len(rule2.filter.conditions): 
     789        return False 
     790    for c1 in rule1.filter.conditions: 
     791        found=False # find the same condition in the other rule 
     792        for c2 in rule2.filter.conditions: 
     793            try: 
     794                if not c1.position == c2.position: continue # same feature? 
     795                if not type(c1) == type(c2): continue # same type of condition 
     796                if type(c1) == Orange.core.ValueFilter_discrete: 
     797                    if not type(c1.values[0]) == type(c2.values[0]): continue 
     798                    if not c1.values[0] == c2.values[0]: continue # same value? 
     799                if type(c1) == Orange.core.ValueFilter_continuous: 
     800                    if not c1.oper == c2.oper: continue # same operator? 
     801                    if not c1.ref == c2.ref: continue #same threshold? 
     802                found=True 
     803                break 
     804            except: 
     805                pass 
     806        if not found: 
     807            return False 
     808    return True 
     809 
     810 
     811# Miscellaneous - utility functions 
     812def avg(l): 
     813    if len(l)==0: 
     814        return 0. 
     815    return sum(l)/len(l) 
     816 
     817def var(l): 
     818    if len(l)<2: 
     819        return 0. 
     820    av = avg(l) 
     821    vars=[math.pow(li-av,2) for li in l] 
     822    return sum(vars)/(len(l)-1) 
     823 
     824def median(l): 
     825    if len(l)==0: 
     826        return 0.     
     827    l.sort() 
     828    le = len(l) 
     829    if le%2 == 1: 
     830        return l[(le-1)/2] 
     831    else: 
     832        return (l[le/2-1]+l[le/2])/2 
     833 
     834def perc(l,p): 
     835    l.sort() 
     836    return l[int(math.floor(p*len(l)))] 
     837 
     838def createRandomDataSet(data): 
     839    newData = Orange.data.Table(data) 
     840    # shuffle data 
     841    cl_num = newData.toNumeric("C") 
     842    random.shuffle(cl_num[0][:,0]) 
     843    clData = Orange.data.Table(Orange.data.Domain([newData.domain.classVar]),cl_num[0]) 
     844    for d_i,d in enumerate(newData): 
     845        d[newData.domain.classVar] = clData[d_i][newData.domain.classVar] 
     846    return newData 
     847 
     848# estimated fisher tippett parameters for a set of values given in vals list (+ deciles) 
     849def compParameters(vals,oldMi=0.5,oldBeta=1.1):                     
     850    # compute percentiles 
     851    vals.sort() 
     852    N = len(vals) 
     853    percs = [avg(vals[int(float(N)*i/10):int(float(N)*(i+1)/10)]) for i in range(10)]             
     854    if N<10: 
     855        return oldMi, oldBeta, percs 
     856    beta = math.sqrt(6*var(vals)/math.pow(math.pi,2)) 
     857    beta = min(2.0,max(oldBeta, beta)) 
     858    mi = max(oldMi, avg(vals) - 0.57721*beta) 
     859    return mi, beta, percs 
     860 
     861 
     862def computeDists(data, weight=0, targetClass=0, N=100, learner=None): 
     863    """ Compute distributions of likelihood ratio statistics of extreme (best) rules.  """ 
     864    if not learner: 
     865        learner = createLearner() 
     866 
     867    ######################### 
     868    ## Learner preparation ## 
     869    ######################### 
     870    oldStopper = learner.ruleFinder.ruleStoppingValidator 
     871    evaluator = learner.ruleFinder.evaluator 
     872    learner.ruleFinder.evaluator = RuleEvaluator_LRS() 
     873    learner.ruleFinder.evaluator.storeRules = True 
     874    learner.ruleFinder.ruleStoppingValidator = RuleValidator_LRS(alpha=1.0) 
     875    learner.ruleFinder.ruleStoppingValidator.max_rule_complexity = 0   
     876 
     877    # loop through N (sampling repetitions) 
     878    maxVals = [] 
     879    for d_i in range(N): 
     880        # create data set (remove and randomize) 
     881        tempData = createRandomDataSet(data) 
     882        learner.ruleFinder.evaluator.rules = RuleList() 
     883        # Next, learn a rule 
     884        bestRule = learner.ruleFinder(tempData,weight,targetClass,RuleList()) 
     885        maxVals.append(bestRule.quality) 
     886    extremeDists=[compParameters(maxVals,1.0,1.0)] 
     887 
     888    ##################### 
     889    ## Restore learner ## 
     890    ##################### 
     891    learner.ruleFinder.evaluator = evaluator 
     892    learner.ruleFinder.ruleStoppingValidator = oldStopper 
     893    return extremeDists 
     894 
     895def createEVDistList(evdList): 
     896    l = Orange.core.EVDistList() 
     897    for el in evdList: 
     898        l.append(Orange.core.EVDist(mu=el[0],beta=el[1],percentiles=el[2])) 
     899    return l 
     900 
     901 
     902 
    813903 
    814904def add_sub_rules(rules, instances, weight, learner, dists): 
     
    857947    return newRules 
    858948 
    859 #def CN2EVCUnorderedLearner(instances = None, weightID=0, **kwds): 
    860 #    cn2 = CN2EVCUnorderedLearnerClass(**kwds) 
    861 #    if instances: 
    862 #        return cn2(instances, weightID) 
    863 #    else: 
    864 #        return cn2 
    865      
    866 class CN2EVCUnorderedLearner(ABCN2): 
    867     """This is an implementation of CN2 + EVC as evaluation + LRC classification. 
    868         Main parameters: 
    869           -- ... 
    870     """ 
    871     def __init__(self, width=5, nsampling=100, rule_sig=1.0, att_sig=1.0, min_coverage = 1., max_rule_complexity = 5.): 
    872         ABCN2.__init__(self, width=width, nsampling=nsampling, rule_sig=rule_sig, att_sig=att_sig, 
    873                        min_coverage=int(min_coverage), max_rule_complexity = int(max_rule_complexity)) 
  • orange/orngABCN2.py

    r7223 r7288  
    455455        cn2_learner.ruleFinder = Orange.core.RuleBeamFinder() 
    456456        cn2_learner.ruleFinder.refiner = SelectorArgConditions(crit_example, allowed_conditions) 
    457         cn2_learner.ruleFinder.evaluator = Orange.classification.rules.mEstimate(self.ruleFinder.evaluator.m) 
     457        cn2_learner.ruleFinder.evaluator = Orange.classification.rules.MEstimate(self.ruleFinder.evaluator.m) 
    458458        rule = cn2_learner.ruleFinder(examples,weightID,0,Orange.core.RuleList()) 
    459459        return rule.filter.conditions 
  • orange/orngCN2.py

    r7223 r7288  
    22from Orange.classification.rules import LaplaceEvaluator 
    33from Orange.classification.rules import WRACCEvaluator 
    4 from Orange.classification.rules import mEstimate 
    5 from Orange.classification.rules import RuleStopping_apriori 
     4from Orange.classification.rules import MEstimate as mEstimate 
     5from Orange.classification.rules import RuleStopping_Apriori as RuleStopping_apriori 
    66from Orange.classification.rules import LengthValidator 
    77from Orange.classification.rules import supervisedClassCheck 
     
    1010from Orange.classification.rules import CN2UnorderedLearner 
    1111from Orange.classification.rules import CN2UnorderedClassifier 
    12 from Orange.classification.rules import RuleClassifier_bestRule 
    13 from Orange.classification.rules import CovererAndRemover_multWeights 
    14 from Orange.classification.rules import CovererAndRemover_addWeights 
     12from Orange.classification.rules import RuleClassifier_BestRule as RuleClassifier_bestRule 
     13from Orange.classification.rules import CovererAndRemover_MultWeights as CovererAndRemover_multWeights 
     14from Orange.classification.rules import CovererAndRemover_AddWeights as CovererAndRemover_addWeights 
    1515from Orange.classification.rules import rule_in_set 
    1616from Orange.classification.rules import rules_equal 
    17 from Orange.classification.rules import noDuplicates_validator 
    18 from Orange.classification.rules import ruleSt_setRules 
     17from Orange.classification.rules import NoDuplicatesValidator as noDuplicates_validator 
     18from Orange.classification.rules import RuleStopping_SetRules as ruleSt_setRules 
    1919from Orange.classification.rules import CN2SDUnorderedLearner 
    2020from Orange.classification.rules import avg 
Note: See TracChangeset for help on using the changeset viewer.