# source:orange/Orange/regression/linear.py@10615:e97eeb46ebfa

Revision 10615:e97eeb46ebfa, 21.2 KB checked in by Lan Zagar <lan.zagar@…>, 2 years ago (diff)

Some cleanup in linear regression.

Line
1"""\
2##############################
3Linear regression (``linear``)
4##############################
5
6.. index:: regression, linear model
7
8.. `Linear regression`: http://en.wikipedia.org/wiki/Linear_regression
9
10
11`Linear regression <http://en.wikipedia.org/wiki/Linear_regression>`_ is a statistical regression method
12which tries to predict a value of a continuous response (class) variable based on the values of several predictors.
13The model assumes that the response variable is a linear combination of the predictors, the task of linear regression
14is therefore to fit the unknown coefficients.
15
16To fit the regression parameters on housing data set use the following code:
17
18.. literalinclude:: code/linear-example.py
19   :lines: 7,9,10,11
20
21
22.. autoclass:: LinearRegressionLearner
23    :members:
24
25.. autoclass:: LinearRegression
26    :members:
27
28Utility functions
29-----------------
30
31.. autofunction:: stepwise
32
33
34========
35Examples
36========
37
38==========
39Prediction
40==========
41
42Predict values of the first 5 data instances
43
44.. literalinclude:: code/linear-example.py
45   :lines: 13-15
46
47The output of this code is
48
49::
50
51    Actual: 24.000, predicted: 30.004
52    Actual: 21.600, predicted: 25.026
53    Actual: 34.700, predicted: 30.568
54    Actual: 33.400, predicted: 28.607
55    Actual: 36.200, predicted: 27.944
56
57=========================
58Poperties of fitted model
59=========================
60
61
62Print regression coefficients with standard errors, t-scores, p-values
63and significances
64
65.. literalinclude:: code/linear-example.py
66   :lines: 17
67
68The code output is
69
70::
71
72    Variable  Coeff Est  Std Error    t-value          p
73     Intercept     36.459      5.103      7.144      0.000   ***
74          CRIM     -0.108      0.033     -3.287      0.001    **
75            ZN      0.046      0.014      3.382      0.001   ***
76         INDUS      0.021      0.061      0.334      0.738
77          CHAS      2.687      0.862      3.118      0.002    **
78           NOX    -17.767      3.820     -4.651      0.000   ***
79            RM      3.810      0.418      9.116      0.000   ***
80           AGE      0.001      0.013      0.052      0.958
81           DIS     -1.476      0.199     -7.398      0.000   ***
82           RAD      0.306      0.066      4.613      0.000   ***
83           TAX     -0.012      0.004     -3.280      0.001    **
84       PTRATIO     -0.953      0.131     -7.283      0.000   ***
85             B      0.009      0.003      3.467      0.001   ***
86         LSTAT     -0.525      0.051    -10.347      0.000   ***
87    Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1
88
89
90
91===================
92Stepwise regression
93===================
94
95To use stepwise regression initialize learner with stepwise=True.
96The upper and lower bound for significance are contolled with
98
99.. literalinclude:: code/linear-example.py
100   :lines: 20-23,25
101
102As you can see from the output the non-significant coefficients
103have been removed from the output
104
105::
106
107    Variable  Coeff Est  Std Error    t-value          p
108     Intercept     36.341      5.067      7.171      0.000   ***
109         LSTAT     -0.523      0.047    -11.019      0.000   ***
110            RM      3.802      0.406      9.356      0.000   ***
111       PTRATIO     -0.947      0.129     -7.334      0.000   ***
112           DIS     -1.493      0.186     -8.037      0.000   ***
113           NOX    -17.376      3.535     -4.915      0.000   ***
114          CHAS      2.719      0.854      3.183      0.002    **
115             B      0.009      0.003      3.475      0.001   ***
116            ZN      0.046      0.014      3.390      0.001   ***
117          CRIM     -0.108      0.033     -3.307      0.001    **
118           RAD      0.300      0.063      4.726      0.000   ***
119           TAX     -0.012      0.003     -3.493      0.001   ***
120    Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1
121
122
123
124"""


import Orange
from Orange.regression import base
import numpy

try:
    from scipy import stats
except ImportError:
    from Orange import statc as stats

from numpy import dot, sqrt
from numpy.linalg import inv, pinv


from Orange.utils import deprecated_members, deprecated_keywords
141
class LinearRegressionLearner(base.BaseRegressionLearner):

    """Fits the linear regression model, i.e. learns the regression
    parameters. The class is derived from
    :class:`Orange.regression.base.BaseRegressionLearner`
    which is used for preprocessing the data (continuization and imputation)
    before fitting the regression parameters.

    """

    def __init__(self, name='linear regression', intercept=True,
                 compute_stats=True, ridge_lambda=None, imputer=None,
                 continuizer=None, use_vars=None, stepwise=False,
                 add_sig=0.05, remove_sig=0.2, **kwds):
        """
        :param name: name of the linear model, default 'linear regression'
        :type name: string
        :param intercept: if True, the intercept beta0 is included
            in the model
        :type intercept: bool
        :param compute_stats: if True, statistical properties of
            the estimators (standard error, t-scores, significances)
            and statistical properties of the model
            (sum of squares, R2, adjusted R2) are computed
        :type compute_stats: bool
        :param ridge_lambda: if not None, ridge regression is performed
                             with the given lambda parameter controlling
                             the regularization
        :type ridge_lambda: :obj:`int` or :obj:`None`
        :param use_vars: the list of independent variables included in
            regression model. If None (default) all variables are used
        :type use_vars: list of Orange.feature.Descriptor or None
        :param stepwise: if True, `stepwise regression
            <http://en.wikipedia.org/wiki/Stepwise_regression>`_
            based on F-test is performed. The significance parameters are
            defined with add_sig and remove_sig
        :type stepwise: bool
        :param add_sig: lower bound of significance for which the variable
            is included in regression model
            default value = 0.05
        :type add_sig: float
        :param remove_sig: upper bound of significance for which
            the variable is excluded from the regression model
            default value = 0.2
        :type remove_sig: float
        """
        self.name = name
        self.intercept = intercept
        self.compute_stats = compute_stats
        self.ridge_lambda = ridge_lambda
        self.set_imputer(imputer=imputer)
        self.set_continuizer(continuizer=continuizer)
        self.stepwise = stepwise
        self.add_sig = add_sig
        self.remove_sig = remove_sig
        self.use_vars = use_vars
        self.__dict__.update(kwds)

    def __call__(self, table, weight=None, verbose=0):
        """Fit the model on `table` and return a :class:`LinearRegression`.

        :param table: data instances.
        :type table: :class:`Orange.data.Table`
        :param weight: the weights for instances. Default: None, i.e.
            all data instances are equally important in fitting
            the regression parameters
        :type weight: None or list of Orange.feature.Continuous
            which stores weights for instances
        """
        if self.use_vars is not None:
            new_domain = Orange.data.Domain(self.use_vars,
                                            table.domain.class_var)
            new_domain.add_metas(table.domain.get_metas())
            table = Orange.data.Table(new_domain, table)

        # discrete values are continuized
        table = self.continuize_table(table)

        # missing values are imputed
        table = self.impute_table(table)

        if self.stepwise:
            use_vars = stepwise(table, weight, add_sig=self.add_sig,
                                remove_sig=self.remove_sig)
            new_domain = Orange.data.Domain(use_vars, table.domain.class_var)
            new_domain.add_metas(table.domain.get_metas())
            table = Orange.data.Table(new_domain, table)

        domain = table.domain

        # convert to numpy
        X, y, w = table.to_numpy()
        n, m = numpy.shape(X)

        if self.intercept:
            X = numpy.insert(X, 0, 1, axis=1) # adds a column of ones

        if weight:
            # weighted least squares: scale rows by sqrt of the weights
            weights = numpy.sqrt([float(ins[weight]) for ins in table])
            X = weights.reshape(n, 1) * X
            y = weights * y

        cov = dot(X.T, X)

        if self.ridge_lambda:
            # add lambda to the diagonal; when an intercept is present the
            # first diagonal element (intercept term) is left unpenalized
            stride = cov.shape[0] + 1
            cov.flat[self.intercept * stride::stride] += self.ridge_lambda

        # adds some robustness by computing the pseudo inverse;
        # normal inverse could fail due to the singularity of X.T * X
        invcov = pinv(cov)
        D = dot(invcov, X.T)
        coefficients = dot(D, y)

        mu_y, sigma_y = numpy.mean(y), numpy.std(y)
        if m > 0:
            cov_x = numpy.cov(X, rowvar=0)
            # standardized coefficients
            std_coefficients = sqrt(cov_x.diagonal()) / sigma_y * coefficients
        else:
            std_coefficients = None

        # TODO: find inferential properties of the estimators for ridge
        if self.compute_stats is False or self.ridge_lambda:
            return LinearRegression(domain.class_var, domain,
                coefficients=coefficients, std_coefficients=std_coefficients,
                intercept=self.intercept)

        fitted = dot(X, coefficients)
        residuals = [ins.get_class() - fitted[i]
                     for i, ins in enumerate(table)]

        # model summary
        # total sum of squares (total variance)
        sst = numpy.sum((y - mu_y) ** 2)
        # sum of squares due to regression (explained variance)
        ssr = numpy.sum((fitted - mu_y) ** 2)
        # error sum of squares (unexplained variance)
        sse = sst - ssr
        # coefficient of determination
        r2 = ssr / sst
        r2adj = 1 - (1 - r2) * (n - 1) / (n - m - 1)
        # F-statistic of the model: explained mean square over residual
        # mean square.  BUGFIX: the residual sum of squares (sst - ssr)
        # must be divided by its degrees of freedom as a whole; the old
        # code computed sst - ssr / (n - m - 1).
        F = (ssr / m) / ((sst - ssr) / (n - m - 1)) if m else None
        df = n - 2
        sigma_square = sse / (n - m - 1)
        # standard error of the regression estimator, t-scores and p-values
        std_error = sqrt(sigma_square * invcov.diagonal())
        t_scores = coefficients / std_error
        # two-sided p-values via the incomplete beta function
        p_vals = [stats.betai(df * 0.5, 0.5, df / (df + t * t))
                  for t in t_scores]

        # dictionary of regression coefficients with standard errors
        # and p-values
        dict_model = {}
        if self.intercept:
            dict_model["Intercept"] = (coefficients[0], std_error[0],
                                       t_scores[0], p_vals[0])
        for i, var in enumerate(domain.features):
            j = i + 1 if self.intercept else i
            dict_model[var.name] = (coefficients[j], std_error[j],
                                    t_scores[j], p_vals[j])

        return LinearRegression(domain.class_var, domain, coefficients, F,
                 std_error=std_error, t_scores=t_scores, p_vals=p_vals,
                 dict_model=dict_model, fitted=fitted, residuals=residuals,
                 m=m, n=n, mu_y=mu_y, r2=r2, r2adj=r2adj, sst=sst, sse=sse,
                 ssr=ssr, std_coefficients=std_coefficients,
                 intercept=self.intercept)
310
# Backward-compatible camelCase keyword aliases for __init__.
deprecated_members({"ridgeLambda": "ridge_lambda",
                    "computeStats": "compute_stats",
                    "useVars": "use_vars",
                    "addSig": "add_sig",
                    "removeSig": "remove_sig",
                    }
                   , ["__init__"],
                   in_place=True)(LinearRegressionLearner)
319
class LinearRegression(Orange.classification.Classifier):

    """Linear regression predicts value of the response variable
    based on the values of independent variables.

    .. attribute:: F

        F-statistics of the model.

    .. attribute:: coefficients

        Regression coefficients stored in list. If the intercept is included
        the first item corresponds to the estimated intercept.

    .. attribute:: std_error

        Standard errors of the coefficient estimator, stored in list.

    .. attribute:: t_scores

        List of t-scores for the estimated regression coefficients.

    .. attribute:: p_vals

        List of p-values for the null hypothesis that the regression
        coefficients equal 0 based on t-scores and two sided
        alternative hypothesis.

    .. attribute:: dict_model

        Statistical properties of the model in a dictionary:
        Keys - names of the independent variables (or "Intercept")
        Values - tuples (coefficient, standard error,
        t-value, p-value)

    .. attribute:: fitted

        Estimated values of the dependent variable for all instances
        from the training table.

    .. attribute:: residuals

        Differences between estimated and actual values of the
        dependent variable for all instances from the training table.

    .. attribute:: m

        Number of independent (predictor) variables.

    .. attribute:: n

        Number of instances.

    .. attribute:: mu_y

        Sample mean of the dependent variable.

    .. attribute:: r2

        `Coefficient of determination
        <http://en.wikipedia.org/wiki/Coefficient_of_determination>`_.

    .. attribute:: r2adj

        Adjusted coefficient of determination.

    .. attribute:: sst, sse, ssr

        Total sum of squares, explained sum of squares and
        residual sum of squares respectively.

    .. attribute:: std_coefficients

        Standardized regression coefficients.

    """

    def __init__(self, class_var=None, domain=None, coefficients=None, F=None,
                 std_error=None, t_scores=None, p_vals=None, dict_model=None,
                 fitted=None, residuals=None, m=None, n=None, mu_y=None,
                 r2=None, r2adj=None, sst=None, sse=None, ssr=None,
                 std_coefficients=None, intercept=None):
        """
        :param class_var: response (class) variable of the fitted model
        :param domain: domain of the training data

        The remaining parameters correspond directly to the class
        attributes documented above and are normally supplied by
        :class:`LinearRegressionLearner`.
        """
        self.class_var = class_var
        self.domain = domain
        self.coefficients = coefficients
        self.F = F
        self.std_error = std_error
        self.t_scores = t_scores
        self.p_vals = p_vals
        self.dict_model = dict_model
        self.fitted = fitted
        self.residuals = residuals
        self.m = m
        self.n = n
        self.mu_y = mu_y
        self.r2 = r2
        self.r2adj = r2adj
        self.sst = sst
        self.sse = sse
        self.ssr = ssr
        self.std_coefficients = std_coefficients
        self.intercept = intercept

    def __call__(self, instance,
                 result_type=Orange.classification.Classifier.GetValue):
        """
        :param instance: data instance for which the value of the response
                         variable will be predicted
        :type instance: :obj:`~Orange.data.Instance`
        """
        ins = Orange.data.Instance(self.domain, instance)
        ins = numpy.array(ins.native())
        if "?" in ins: # missing value -> corresponding coefficient omitted
            def miss_2_0(x): return x if x != "?" else 0
            ins = map(miss_2_0, ins)

        if self.intercept:
            if len(self.coefficients) > 1:
                # skip the trailing class value of the native instance
                y_hat = self.coefficients[0] + dot(self.coefficients[1:],
                                                   ins[:-1])
            else:
                if len(ins) == 1:
                    # intercept-only model: predict the training mean
                    y_hat = self.mu_y
                else:
                    y_hat = dot(self.coefficients, ins[:-1])
        else:
            y_hat = dot(self.coefficients, ins[:-1])
        y_hat = self.class_var(y_hat)
        # degenerate distribution putting all mass on the prediction
        dist = Orange.statistics.distribution.Continuous(self.class_var)
        dist[y_hat] = 1.
        if result_type == Orange.classification.Classifier.GetValue:
            return y_hat
        if result_type == Orange.classification.Classifier.GetProbabilities:
            return dist
        return (y_hat, dist)

    def to_string(self):
        """Pretty-prints linear regression model,
        i.e. estimated regression coefficients with standard errors, t-scores
        and significances.

        """
        labels = ('Variable', 'Coeff Est', 'Std Error', 't-value', 'p')
        lines = [' '.join(['%10s' % l for l in labels])]

        fmt = "%10s " + ' '.join(["%10.3f"] * 4) + " %5s"
        if not self.p_vals:
            raise ValueError("Model does not contain model statistics.")

        def get_star(p):
            # significance codes, same convention as R's summary.lm
            if p < 0.001: return  "*" * 3
            elif p < 0.01: return "*" * 2
            elif p < 0.05: return "*"
            elif p < 0.1: return  "."
            else: return " "

        if self.intercept:
            stars = get_star(self.p_vals[0])
            lines.append(fmt % ('Intercept', self.coefficients[0],
                                self.std_error[0], self.t_scores[0],
                                self.p_vals[0], stars))
        # one row per predictor; offset skips the intercept entry
        offset = 1 if self.intercept else 0
        for i in range(len(self.domain.attributes)):
            j = i + offset
            stars = get_star(self.p_vals[j])
            lines.append(fmt % (self.domain.attributes[i].name,
                         self.coefficients[j], self.std_error[j],
                         self.t_scores[j], self.p_vals[j], stars))
        lines.append("Signif. codes:  0 *** 0.001 ** 0.01 * 0.05 . 0.1 empty 1")
        return "\n".join(lines)

    def __str__(self):
        return self.to_string()
507
508
def compare_models(c1, c2):
    """ Compares if classification model c1 is significantly better
    than model c2. The comparison is based on F-test, the p-value
    is returned.

    :param c1, c2: linear regression model objects.
    :type c1, c2: :class:`LinearRegression`

    """
    if c1 is None or c2 is None:
        return 1.0
    p1, p2, n = c1.m, c2.m, c1.n
    RSS1, RSS2 = c1.sse, c2.sse
    # the test is only meaningful when c2 is the larger model and it
    # actually reduces the residual sum of squares
    if RSS1 <= RSS2 or p2 <= p1 or n <= p2 or RSS2 <= 0:
        return 1.0
    # F-statistic for comparing two nested linear models
    F = ((RSS1 - RSS2) / (p2 - p1)) / (RSS2 / (n - p2))
    return stats.fprob(int(p2 - p1), int(n - p2), F)
526
527
530    """ Performs `stepwise linear regression
531    <http://en.wikipedia.org/wiki/Stepwise_regression>`_:
532    on table and returns the list of remaing independent variables
533    which fit a significant linear regression model.coefficients
534
535    :param table: data instances.
536    :type table: :class:`Orange.data.Table`
537    :param weight: the weights for instances. Default: None, i.e. all data
538        instances are eqaully important in fitting the regression parameters
539    :type weight: None or list of Orange.feature.Continuous
540        which stores the weights
541    :param add_sig: lower bound of significance for which the variable
542        is included in regression model
543        default value = 0.05
545    :param remove_sig: upper bound of significance for which the variable
546        is excluded from the regression model
547        default value = 0.2
548    :type remove_sig: float
549    """
550
551
552    inc_vars = []
553    not_inc_vars = table.domain.attributes
554
555    changed_model = True
556    while changed_model:
557        changed_model = False
558        # remove all unsignificant conditions (learn several models,
559        # where each time one variable is removed and check significance)
560        c0 = LinearRegressionLearner(table, use_vars=inc_vars)
561        reduced_model = [] # reduced model
562        for ati in range(len(inc_vars)):
563            try:
564                reduced_model.append(LinearRegressionLearner(table, weight,
565                        use_vars=inc_vars[:ati] + inc_vars[(ati + 1):]))
566            except Exception:
567                reduced_model.append(None)
568
569        sigs = [compare_models(r, c0) for r in reduced_model]
570        if sigs and max(sigs) > remove_sig:
571            # remove that variable, start again
572            crit_var = inc_vars[sigs.index(max(sigs))]
573            not_inc_vars.append(crit_var)
574            inc_vars.remove(crit_var)
575            changed_model = True
576            continue
577
578        # add all significant conditions (go through all attributes in
579        # not_inc_vars, is there one that significantly improves the model?
580        extended_model = []
581        for ati in range(len(not_inc_vars)):
582            try:
583                extended_model.append(LinearRegressionLearner(table,
584                        weight, use_vars=inc_vars + [not_inc_vars[ati]]))
585            except Exception:
586                extended_model.append(None)
587
588        sigs = [compare_models(c0, r) for r in extended_model]
589        if sigs and min(sigs) < add_sig:
590            best_var = not_inc_vars[sigs.index(min(sigs))]
591            inc_vars.append(best_var)
592            not_inc_vars.remove(best_var)
593            changed_model = True
594    return inc_vars
595
596
597if __name__ == "__main__":
598
599    import Orange
600    from Orange.regression import linear
601
602    table = Orange.data.Table("housing.tab")
603    c = LinearRegressionLearner(table)
604    print c
605
Note: See TracBrowser for help on using the repository browser.