# source:orange/Orange/regression/linear.py@10580:c4cbae8dcf8b

Revision 10580:c4cbae8dcf8b, 21.4 KB checked in by markotoplak, 2 years ago (diff)

Moved deprecation functions, progress bar support and environ into Orange.utils. Orange imports cleanly, although it is not tested yet.

Line
1"""\
2##############################
3Linear regression (``linear``)
4##############################
5
6.. index:: regression, linear model
7
8.. `Linear regression`: http://en.wikipedia.org/wiki/Linear_regression
9
10
11`Linear regression <http://en.wikipedia.org/wiki/Linear_regression>`_ is a statistical regression method
12which tries to predict a value of a continuous response (class) variable based on the values of several predictors.
13The model assumes that the response variable is a linear combination of the predictors, the task of linear regression
14is therefore to fit the unknown coefficients.
15
16To fit the regression parameters on housing data set use the following code:
17
18.. literalinclude:: code/linear-example.py
19   :lines: 7,9,10,11
20
21
22.. autoclass:: LinearRegressionLearner
23    :members:
24
25.. autoclass:: LinearRegression
26    :members:
27
28Utility functions
29-----------------
30
31.. autofunction:: stepwise
32
33
34========
35Examples
36========
37
38==========
39Prediction
40==========
41
42Predict values of the first 5 data instances
43
44.. literalinclude:: code/linear-example.py
45   :lines: 13-15
46
47The output of this code is
48
49::
50
51    Actual: 24.000, predicted: 30.004
52    Actual: 21.600, predicted: 25.026
53    Actual: 34.700, predicted: 30.568
54    Actual: 33.400, predicted: 28.607
55    Actual: 36.200, predicted: 27.944
56
57==========================
58Properties of fitted model
59==========================
60
61
62Print regression coefficients with standard errors, t-scores, p-values
63and significances
64
65.. literalinclude:: code/linear-example.py
66   :lines: 17
67
68The code output is
69
70::
71
72    Variable  Coeff Est  Std Error    t-value          p
73     Intercept     36.459      5.103      7.144      0.000   ***
74          CRIM     -0.108      0.033     -3.287      0.001    **
75            ZN      0.046      0.014      3.382      0.001   ***
76         INDUS      0.021      0.061      0.334      0.738
77          CHAS      2.687      0.862      3.118      0.002    **
78           NOX    -17.767      3.820     -4.651      0.000   ***
79            RM      3.810      0.418      9.116      0.000   ***
80           AGE      0.001      0.013      0.052      0.958
81           DIS     -1.476      0.199     -7.398      0.000   ***
82           RAD      0.306      0.066      4.613      0.000   ***
83           TAX     -0.012      0.004     -3.280      0.001    **
84       PTRATIO     -0.953      0.131     -7.283      0.000   ***
85             B      0.009      0.003      3.467      0.001   ***
86         LSTAT     -0.525      0.051    -10.347      0.000   ***
87    Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1
88
89
90
91===================
92Stepwise regression
93===================
94
95To use stepwise regression initialize learner with stepwise=True.
96The upper and lower bound for significance are controlled with
97add_sig and remove_sig.
98
99.. literalinclude:: code/linear-example.py
100   :lines: 20-23,25
101
102As you can see from the output the non-significant coefficients
103have been removed from the output
104
105::
106
107    Variable  Coeff Est  Std Error    t-value          p
108     Intercept     36.341      5.067      7.171      0.000   ***
109         LSTAT     -0.523      0.047    -11.019      0.000   ***
110            RM      3.802      0.406      9.356      0.000   ***
111       PTRATIO     -0.947      0.129     -7.334      0.000   ***
112           DIS     -1.493      0.186     -8.037      0.000   ***
113           NOX    -17.376      3.535     -4.915      0.000   ***
114          CHAS      2.719      0.854      3.183      0.002    **
115             B      0.009      0.003      3.475      0.001   ***
116            ZN      0.046      0.014      3.390      0.001   ***
117          CRIM     -0.108      0.033     -3.307      0.001    **
118           RAD      0.300      0.063      4.726      0.000   ***
119           TAX     -0.012      0.003     -3.493      0.001   ***
120    Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1
121
122
123
124"""
125
126
127import Orange
128from Orange.regression import base
129import numpy
130
131try:
132    from scipy import stats
133except ImportError:
134    from Orange import statc as stats
135
136from numpy import dot, sqrt
137from numpy.linalg import inv, pinv
138
139
140from Orange.utils import deprecated_members, deprecated_keywords
141
class LinearRegressionLearner(base.BaseRegressionLearner):

    """Fits the linear regression model, i.e. learns the regression
    parameters. The class is derived from
    :class:`Orange.regression.base.BaseRegressionLearner`
    which is used for preprocessing the data (continuization and imputation)
    before fitting the regression parameters.

    """

    def __init__(self, name='linear regression', intercept=True,
                 compute_stats=True, ridge_lambda=None, imputer=None,
                 continuizer=None, use_vars=None, stepwise=False,
                 # NOTE(review): this signature line was dropped in the
                 # browsed listing; defaults restored from the docstring below
                 add_sig=0.05, remove_sig=0.2, **kwds):
        """
        :param name: name of the linear model, default 'linear regression'
        :type name: string
        :param intercept: if True, the intercept beta0 is included
            in the model
        :type intercept: bool
        :param compute_stats: if True, statistical properties of
            the estimators (standard error, t-scores, significances)
            and statistical properties of the model
            (sum of squares, R2, adjusted R2) are computed
        :type compute_stats: bool
        :param ridge_lambda: if not None, ridge regression is performed
                             with the given lambda parameter controlling
                             the regularization
        :type ridge_lambda: :obj:`int` or :obj:`None`
        :param use_vars: the list of independent variables included in
            regression model. If None (default) all variables are used
        :type use_vars: list of Orange.feature.Descriptor or None
        :param stepwise: if True, `stepwise regression
            <http://en.wikipedia.org/wiki/Stepwise_regression>`_
            based on F-test is performed. The significance parameters are
            add_sig and remove_sig
        :type stepwise: bool
        :param add_sig: lower bound of significance for which the variable
            is included in regression model
            default value = 0.05
        :type add_sig: float
        :param remove_sig: upper bound of significance for which
            the variable is excluded from the regression model
            default value = 0.2
        :type remove_sig: float
        """
        self.name = name
        self.intercept = intercept
        self.compute_stats = compute_stats
        self.ridge_lambda = ridge_lambda
        self.set_imputer(imputer=imputer)
        self.set_continuizer(continuizer=continuizer)
        self.stepwise = stepwise
        # NOTE(review): restored -- the add_sig assignment line was dropped
        # in the browsed listing (remove_sig below is assigned analogously).
        self.add_sig = add_sig
        self.remove_sig = remove_sig
        self.use_vars = use_vars
        self.__dict__.update(kwds)

    def __call__(self, table, weight=None, verbose=0):
        """Fit the model on table and return a :class:`LinearRegression`.

        :param table: data instances.
        :type table: :class:`Orange.data.Table`
        :param weight: the weights for instances. Default: None, i.e.
            all data instances are equally important in fitting
            the regression parameters
        :type weight: None or list of Orange.feature.Continuous
            which stores weights for instances
        """
        # verbose is currently unused; kept for interface compatibility
        if not self.use_vars is None:
            new_domain = Orange.data.Domain(self.use_vars,
                                            table.domain.class_var)
            table = Orange.data.Table(new_domain, table)

        # discrete values are continuized
        table = self.continuize_table(table)

        # missing values are imputed
        table = self.impute_table(table)

        if self.stepwise:
            # NOTE(review): the call line was dropped in the browsed listing
            # (only the remove_sig continuation survived); restored to match
            # the stepwise() utility defined below.
            use_vars = stepwise(table, weight, add_sig=self.add_sig,
                                remove_sig=self.remove_sig)
            new_domain = Orange.data.Domain(use_vars, table.domain.class_var)
            table = Orange.data.Table(new_domain, table)

        # conversion to numpy
        A, y, w = table.to_numpy()
        if A is None:
            n, m = len(table), 0
        else:
            n, m = numpy.shape(A)

        if self.intercept:
            if A is None:
                X = numpy.ones([n, 1])
            else:
                X = numpy.insert(A, 0, 1, axis=1) # adds a column of ones
        else:
            X = A

        domain = table.domain

        if weight:
            # weighted least squares: scale rows by sqrt of the weights
            weights = numpy.sqrt([float(ins[weight]) for ins in table])
            X = weights.reshape(n, 1) * X
            y = weights * y

        cov = dot(X.T, X)

        if self.ridge_lambda:
            cov += self.ridge_lambda * numpy.eye(m + self.intercept)

        # adds some robustness by computing the pseudo inverse;
        # normal inverse could fail due to the singularity of X.T * X
        invcov = pinv(cov)
        D = dot(invcov, X.T)
        coefficients = dot(D, y)

        mu_y, sigma_y = numpy.mean(y), numpy.std(y)
        if A is not None:
            cov_x = numpy.cov(X, rowvar=0)

            # standardized coefficients
            std_coefficients = sqrt(cov_x.diagonal()) / sigma_y * coefficients
        else:
            std_coefficients = None

        # TODO: find inferential properties of the estimators for ridge
        if self.compute_stats is False or self.ridge_lambda:
            return LinearRegression(domain.class_var, domain,
                coefficients=coefficients, std_coefficients=std_coefficients,
                intercept=self.intercept)


        fitted = dot(X, coefficients)
        residuals = [ins.get_class() - fitted[i]
                     for i, ins in enumerate(table)]

        # model summary
        # total sum of squares (total variance)
        sst = numpy.sum((y - mu_y) ** 2)
        # sum of squares due to regression (explained variance)
        ssr = numpy.sum((fitted - mu_y) ** 2)
        # error sum of squares (unexplained variance)
        sse = sst - ssr
        # coefficient of determination
        r2 = ssr / sst
        r2adj = 1 - (1 - r2) * (n - 1) / (n - m - 1)
        F = (ssr / m) / (sst - ssr / (n - m - 1))
        df = n - 2
        sigma_square = sse / (n - m - 1)
        # standard error of the regression estimator, t-scores and p-values
        std_error = sqrt(sigma_square * invcov.diagonal())
        t_scores = coefficients / std_error
        p_vals = [stats.betai(df * 0.5, 0.5, df / (df + t * t))
                  for t in t_scores]

        # dictionary of regression coefficients with standard errors
        # and p-values
        dict_model = {}
        if self.intercept:
            dict_model["Intercept"] = (coefficients[0], std_error[0],
                                       t_scores[0], p_vals[0])
        for i, var in enumerate(domain.attributes):
            j = i + 1 if self.intercept else i
            dict_model[var.name] = (coefficients[j], std_error[j],
                                    t_scores[j], p_vals[j])

        # NOTE(review): the m/n/mu_y/r2/r2adj/sst/sse line of this call was
        # dropped in the browsed listing; restored to match LinearRegression's
        # __init__ parameter list.
        return LinearRegression(domain.class_var, domain, coefficients, F,
                 std_error=std_error, t_scores=t_scores, p_vals=p_vals,
                 dict_model=dict_model, fitted=fitted, residuals=residuals,
                 m=m, n=n, mu_y=mu_y, r2=r2, r2adj=r2adj, sst=sst, sse=sse,
                 ssr=ssr, std_coefficients=std_coefficients,
                 intercept=self.intercept)
318
# Backward-compatible camelCase keyword aliases for the learner's __init__.
# NOTE(review): the "addSig" mapping line was dropped in the browsed listing
# (internal line 322); restored to mirror the add_sig parameter, matching the
# surviving removeSig -> remove_sig mapping.
deprecated_members({"ridgeLambda": "ridge_lambda",
                    "computeStats": "compute_stats",
                    "useVars": "use_vars",
                    "addSig": "add_sig",
                    "removeSig": "remove_sig",
                    },
                   ["__init__"],
                   in_place=True)(LinearRegressionLearner)
327
class LinearRegression(Orange.classification.Classifier):

    """Linear regression predicts value of the response variable
    based on the values of independent variables.

    .. attribute:: F

        F-statistics of the model.

    .. attribute:: coefficients

        Regression coefficients stored in list. If the intercept is included
        the first item corresponds to the estimated intercept.

    .. attribute:: std_error

        Standard errors of the coefficient estimator, stored in list.

    .. attribute:: t_scores

        List of t-scores for the estimated regression coefficients.

    .. attribute:: p_vals

        List of p-values for the null hypothesis that the regression
        coefficients equal 0 based on t-scores and two sided
        alternative hypothesis.

    .. attribute:: dict_model

        Statistical properties of the model in a dictionary:
        Keys - names of the independent variables (or "Intercept")
        Values - tuples (coefficient, standard error,
        t-value, p-value)

    .. attribute:: fitted

        Estimated values of the dependent variable for all instances
        from the training table.

    .. attribute:: residuals

        Differences between estimated and actual values of the
        dependent variable for all instances from the training table.

    .. attribute:: m

        Number of independent (predictor) variables.

    .. attribute:: n

        Number of instances.

    .. attribute:: mu_y

        Sample mean of the dependent variable.

    .. attribute:: r2

        `Coefficient of determination
        <http://en.wikipedia.org/wiki/Coefficient_of_determination>`_.

    .. attribute:: r2adj

        Adjusted coefficient of determination.

    .. attribute:: sst, sse, ssr

        Total sum of squares, explained sum of squares and
        residual sum of squares respectively.

    .. attribute:: std_coefficients

        Standardized regression coefficients.

    """

    def __init__(self, class_var=None, domain=None, coefficients=None, F=None,
                 std_error=None, t_scores=None, p_vals=None, dict_model=None,
                 fitted=None, residuals=None, m=None, n=None, mu_y=None,
                 r2=None, r2adj=None, sst=None, sse=None, ssr=None,
                 std_coefficients=None, intercept=None):
        """Store the fitted coefficients and model statistics.

        :param class_var: dependent (response) variable of the model
        :param domain: domain of the training data

        The remaining parameters populate the model attributes
        documented on the class.
        """
        self.class_var = class_var
        self.domain = domain
        self.coefficients = coefficients
        self.F = F
        self.std_error = std_error
        self.t_scores = t_scores
        self.p_vals = p_vals
        self.dict_model = dict_model
        self.fitted = fitted
        self.residuals = residuals
        self.m = m
        self.n = n
        self.mu_y = mu_y
        self.r2 = r2
        # NOTE(review): restored -- r2adj was accepted by the signature but
        # never stored (assignment line dropped in the browsed listing).
        self.r2adj = r2adj
        self.sst = sst
        self.sse = sse
        self.ssr = ssr
        self.std_coefficients = std_coefficients
        self.intercept = intercept

    def __call__(self, instance,
                 result_type=Orange.classification.Classifier.GetValue):
        """Predict the value of the response variable for instance.

        :param instance: data instance for which the value of the response
                         variable will be predicted
        :type instance: :obj:`~Orange.data.Instance`
        """
        ins = Orange.data.Instance(self.domain, instance)
        ins = numpy.array(ins.native())
        if "?" in ins: # missing value -> corresponding coefficient omitted
            # (a leftover debug print of the instance was removed here)
            ins = [x if x != "?" else 0 for x in ins]

        if self.intercept:
            if len(self.coefficients) > 1:
                y_hat = self.coefficients[0] + dot(self.coefficients[1:],
                                                   ins[:-1])
            else:
                if len(ins) == 1:
                    # intercept-only model: predict the training mean
                    y_hat = self.mu_y
                else:
                    y_hat = dot(self.coefficients, ins[:-1])
        else:
            y_hat = dot(self.coefficients, ins[:-1])
        y_hat = self.class_var(y_hat)
        # degenerate distribution: all probability mass on the prediction
        dist = Orange.statistics.distribution.Continuous(self.class_var)
        dist[y_hat] = 1.
        if result_type == Orange.classification.Classifier.GetValue:
            return y_hat
        if result_type == Orange.classification.Classifier.GetProbabilities:
            return dist
        return (y_hat, dist)


    def to_string(self):
        """Pretty-prints linear regression model,
        i.e. estimated regression coefficients with standard errors, t-scores
        and significances.

        """
        labels = ('Variable', 'Coeff Est', 'Std Error', 't-value', 'p')
        lines = [' '.join(['%10s' % l for l in labels])]

        fmt = "%10s " + ' '.join(["%10.3f"] * 4) + " %5s"
        if not self.p_vals:
            raise ValueError("Model does not contain model statistics.")
        def get_star(p):
            # significance codes as printed in the legend below
            if p < 0.001: return  "*" * 3
            elif p < 0.01: return "*" * 2
            elif p < 0.05: return "*"
            elif p < 0.1: return  "."
            else: return " "

        if self.intercept == True:
            stars =  get_star(self.p_vals[0])
            lines.append(fmt % ('Intercept', self.coefficients[0],
                                self.std_error[0], self.t_scores[0],
                                self.p_vals[0], stars))
            for i in range(len(self.domain.attributes)):
                stars = get_star(self.p_vals[i + 1])
                lines.append(fmt % (self.domain.attributes[i].name,
                             self.coefficients[i + 1], self.std_error[i + 1],
                             self.t_scores[i + 1], self.p_vals[i + 1], stars))
        else:
            for i in range(len(self.domain.attributes)):
                stars = get_star(self.p_vals[i])
                lines.append(fmt % (self.domain.attributes[i].name,
                             self.coefficients[i], self.std_error[i],
                             self.t_scores[i], self.p_vals[i], stars))
        lines.append("Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1")
        return "\n".join(lines)

    def __str__(self):
        return self.to_string()
515
516
517def compare_models(c1, c2):
518    """ Compares if classifiaction model c1 is significantly better
519    than model c2. The comparison is based on F-test, the p-value
520    is returned.
521
522    :param c1, c2: linear regression model objects.
523    :type lr: :class:`LinearRegression`
524
525    """
526    if c1 == None or c2 == None:
527        return 1.0
528    p1, p2, n = c1.m, c2.m, c1.n
530    if RSS1 <= RSS2 or p2 <= p1 or n <= p2 or RSS2 <= 0:
531        return 1.0
533    return stats.fprob(int(p2 - p1), int(n - p2), F)
534
535
# NOTE(review): the def line itself was dropped in the browsed listing;
# signature reconstructed from the function's own docstring (parameter names
# and defaults) -- confirm against the repository head.
def stepwise(table, weight=None, add_sig=0.05, remove_sig=0.2):
    """ Performs `stepwise linear regression
    <http://en.wikipedia.org/wiki/Stepwise_regression>`_:
    on table and returns the list of remaining independent variables
    which fit a significant linear regression model.

    :param table: data instances.
    :type table: :class:`Orange.data.Table`
    :param weight: the weights for instances. Default: None, i.e. all data
        instances are equally important in fitting the regression parameters
    :type weight: None or list of Orange.feature.Continuous
        which stores the weights
    :param add_sig: lower bound of significance for which the variable
        is included in regression model
        default value = 0.05
    :type add_sig: float
    :param remove_sig: upper bound of significance for which the variable
        is excluded from the regression model
        default value = 0.2
    :type remove_sig: float
    """


    inc_vars = []
    # NOTE(review): this list is mutated below (append/remove); assumes
    # table.domain.attributes returns a fresh list -- confirm, otherwise
    # the domain itself would be modified.
    not_inc_vars = table.domain.attributes

    changed_model = True
    while changed_model:
        changed_model = False
        # remove all unsignificant conditions (learn several models,
        # where each time one variable is removed and check significance)
        c0 = LinearRegressionLearner(table, use_vars=inc_vars)
        reduced_model = [] # reduced model
        for ati in range(len(inc_vars)):
            try:
                reduced_model.append(LinearRegressionLearner(table, weight,
                        use_vars=inc_vars[:ati] + inc_vars[(ati + 1):]))
            except Exception:
                # a model that fails to fit never triggers a removal
                reduced_model.append(None)

        sigs = [compare_models(r, c0) for r in reduced_model]
        if sigs and max(sigs) > remove_sig:
            # remove that variable, start again
            crit_var = inc_vars[sigs.index(max(sigs))]
            not_inc_vars.append(crit_var)
            inc_vars.remove(crit_var)
            changed_model = True
            continue

        # add all significant conditions (go through all attributes in
        # not_inc_vars, is there one that significantly improves the model?
        extended_model = []
        for ati in range(len(not_inc_vars)):
            try:
                extended_model.append(LinearRegressionLearner(table,
                        weight, use_vars=inc_vars + [not_inc_vars[ati]]))
            except Exception:
                extended_model.append(None)

        sigs = [compare_models(c0, r) for r in extended_model]
        if sigs and min(sigs) < add_sig:
            best_var = not_inc_vars[sigs.index(min(sigs))]
            inc_vars.append(best_var)
            not_inc_vars.remove(best_var)
            changed_model = True
    return inc_vars
603
604
if __name__ == "__main__":
    # Smoke test: fit the model on the housing data and print its summary.
    import Orange
    from Orange.regression import linear

    table = Orange.data.Table("housing.tab")
    c = LinearRegressionLearner(table)
    # print() works identically under Python 2 (parenthesized expression)
    # and Python 3, unlike the original py2-only print statement
    print(c)
613
Note: See TracBrowser for help on using the repository browser.