Mercurial > hg > octave-nkf
annotate scripts/statistics/models/logistic_regression.m @ 9245:16f53d29049f
update copyright notices
author | John W. Eaton <jwe@octave.org> |
---|---|
date | Fri, 22 May 2009 10:46:00 -0400 |
parents | 1bf0ce0930be |
children | d1978e7364ad |
rev | line source |
---|---|
9245 | 1 ## Copyright (C) 1995, 1996, 1997, 1998, 1999, 2000, 2002, 2005, 2007, |
2 ## 2009 Kurt Hornik | |
3426 | 3 ## |
3922 | 4 ## This file is part of Octave. |
5 ## | |
6 ## Octave is free software; you can redistribute it and/or modify it | |
7 ## under the terms of the GNU General Public License as published by | |
7016 | 8 ## the Free Software Foundation; either version 3 of the License, or (at |
9 ## your option) any later version. | |
3426 | 10 ## |
3922 | 11 ## Octave is distributed in the hope that it will be useful, but |
3191 | 12 ## WITHOUT ANY WARRANTY; without even the implied warranty of |
13 ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
3426 | 14 ## General Public License for more details. |
15 ## | |
3191 | 16 ## You should have received a copy of the GNU General Public License |
7016 | 17 ## along with Octave; see the file COPYING. If not, see |
18 ## <http://www.gnu.org/licenses/>. | |
3191 | 19 |
3454 | 20 ## -*- texinfo -*- |
6754 | 21 ## @deftypefn {Function File} {[@var{theta}, @var{beta}, @var{dev}, @var{dl}, @var{d2l}, @var{p}] =} logistic_regression (@var{y}, @var{x}, @var{print}, @var{theta}, @var{beta}) |
3454 | 22 ## Perform ordinal logistic regression. |
3191 | 23 ## |
3454 | 24 ## Suppose @var{y} takes values in @var{k} ordered categories, and let |
25 ## @code{gamma_i (@var{x})} be the cumulative probability that @var{y} | |
26 ## falls in one of the first @var{i} categories given the covariate | |
27 ## @var{x}. Then | |
28 ## | |
29 ## @example | |
30 ## [theta, beta] = logistic_regression (y, x) | |
31 ## @end example | |
32 ## | |
33 ## @noindent | |
3191 | 34 ## fits the model |
3454 | 35 ## |
36 ## @example | |
9051 | 37 ## logit (gamma_i (x)) = theta_i - beta' * x, i = 1 @dots{} k-1 |
3454 | 38 ## @end example |
3191 | 39 ## |
3454 | 40 ## The number of ordinal categories, @var{k}, is taken to be the number |
41 ## of distinct values of @code{round (@var{y})}. If @var{k} equals 2, | |
42 ## @var{y} is binary and the model is ordinary logistic regression. The | |
43 ## matrix @var{x} is assumed to have full column rank. | |
44 ## | |
45 ## Given @var{y} only, @code{theta = logistic_regression (y)} | |
3191 | 46 ## fits the model with baseline logit odds only. |
47 ## | |
48 ## The full form is | |
3454 | 49 ## |
50 ## @example | |
9051 | 51 ## @group |
3454 | 52 ## [theta, beta, dev, dl, d2l, gamma] |
53 ## = logistic_regression (y, x, print, theta, beta) | |
9051 | 54 ## @end group |
3454 | 55 ## @end example |
56 ## | |
57 ## @noindent | |
58 ## in which all output arguments and all input arguments except @var{y} | |
59 ## are optional. | |
3191 | 60 ## |
6754 | 61 ## Setting @var{print} to 1 requests summary information about the fitted |
3454 | 62 ## model to be displayed. Setting @var{print} to 2 requests information |
63 ## about convergence at each iteration. Other values request no | |
64 ## information to be displayed. The input arguments @var{theta} and | |
65 ## @var{beta} give initial estimates for @var{theta} and @var{beta}. | |
66 ## | |
67 ## The returned value @var{dev} holds minus twice the log-likelihood. | |
3191 | 68 ## |
3454 | 69 ## The returned values @var{dl} and @var{d2l} are the vector of first |
70 ## and the matrix of second derivatives of the log-likelihood with | |
71 ## respect to @var{theta} and @var{beta}. | |
3191 | 72 ## |
3454 | 73 ## @var{p} holds estimates for the conditional distribution of @var{y} |
74 ## given @var{x}. | |
75 ## @end deftypefn | |
3191 | 76 |
3426 | 77 ## Original for MATLAB written by Gordon K Smyth <gks@maths.uq.oz.au>, |
3191 | 78 ## U of Queensland, Australia, on Nov 19, 1990. Last revision Aug 3, |
79 ## 1992. | |
80 | |
3456 | 81 ## Author: Gordon K Smyth <gks@maths.uq.oz.au>, |
5428 | 82 ## Adapted-By: KH <Kurt.Hornik@wu-wien.ac.at> |
3456 | 83 ## Description: Ordinal logistic regression |
3191 | 84 |
85 ## Uses the auxiliary functions logistic_regression_derivatives and | |
86 ## logistic_regression_likelihood. | |
87 | |
function [theta, beta, dev, dl, d2l, p] ...
      = logistic_regression (y, x, print, theta, beta)

  ## Check input.  Responses are rounded to integer categories; a
  ## missing x means an intercept-only (baseline logit odds) model.
  y = round (vec (y));
  [my, ny] = size (y);
  if (nargin < 2)
    x = zeros (my, 0);
  endif
  [mx, nx] = size (x);
  if (mx != my)
    error ("x and y must have the same number of observations");
  endif

  ## Initial calculations.  x is negated so the fitted model is
  ## logit (gamma_i (x)) = theta_i - beta' * x.
  x = -x;
  tol = 1e-6; incr = 10; decr = 2;
  ymin = min (y); ymax = max (y); yrange = ymax - ymin;
  ## z(i,j) = 1 iff y(i) falls in category j; z1 is shifted by one
  ## category.  Columns for empty categories are dropped.
  z = (y * ones (1, yrange)) == ((y * 0 + 1) * (ymin : (ymax - 1)));
  z1 = (y * ones (1, yrange)) == ((y * 0 + 1) * ((ymin + 1) : ymax));
  z = z(:, any (z));
  z1 = z1(:, any (z1));
  [mz, nz] = size (z);

  ## Starting values: zero slopes and cut points from the empirical
  ## cumulative category frequencies, unless supplied by the caller.
  if (nargin < 3)
    print = 0;
  endif
  if (nargin < 4)
    beta = zeros (nx, 1);
  endif
  if (nargin < 5)
    g = cumsum (sum (z))' ./ my;
    theta = log (g ./ (1 - g));
  endif
  tb = [theta; beta];

  ## Likelihood and derivatives at starting values.
  [g, g1, p, dev] = logistic_regression_likelihood (y, x, tb, z, z1);
  [dl, d2l] = logistic_regression_derivatives (x, z, z1, g, g1, p);
  epsilon = std (vec (d2l)) / 1000;

  ## Maximize likelihood using Levenberg modified Newton's method:
  ## take Newton steps, and when the deviance worsens, inflate the
  ## ridge term epsilon until the step improves the deviance again.
  iter = 0;
  while (abs (dl' * (d2l \ dl) / length (dl)) > tol)
    iter = iter + 1;
    tbold = tb;
    devold = dev;
    tb = tbold - d2l \ dl;
    [g, g1, p, dev] = logistic_regression_likelihood (y, x, tb, z, z1);
    if ((dev - devold) / (dl' * (tb - tbold)) < 0)
      epsilon = epsilon / decr;
    else
      while ((dev - devold) / (dl' * (tb - tbold)) > 0)
        epsilon = epsilon * incr;
        if (epsilon > 1e+15)
          error ("epsilon too large");
        endif
        tb = tbold - (d2l - epsilon * eye (size (d2l))) \ dl;
        [g, g1, p, dev] = logistic_regression_likelihood (y, x, tb, z, z1);
        ## FIX: this diagnostic used to print unconditionally; the
        ## documentation promises no output unless print is 1 or 2.
        if (print == 2)
          disp ("epsilon"); disp (epsilon);
        endif
      endwhile
    endif
    [dl, d2l] = logistic_regression_derivatives (x, z, z1, g, g1, p);
    if (print == 2)
      disp ("Iteration"); disp (iter);
      disp ("Deviance"); disp (dev);
      disp ("First derivative"); disp (dl');
      disp ("Eigenvalues of second derivative"); disp (eig (d2l)');
    endif
  endwhile

  ## Tidy up output.

  theta = tb(1 : nz, 1);
  beta = tb((nz + 1) : (nz + nx), 1);

  if (print >= 1)
    printf ("\n");
    printf ("Logistic Regression Results:\n");
    printf ("\n");
    printf ("Number of Iterations: %d\n", iter);
    printf ("Deviance: %f\n", dev);
    printf ("Parameter Estimates:\n");
    printf ("     Theta         S.E.\n");
    ## Standard errors from the observed information matrix.
    se = sqrt (diag (inv (-d2l)));
    for i = 1 : nz
      printf ("   %8.4f     %8.4f\n", tb (i), se (i));
    endfor
    if (nx > 0)
      printf ("      Beta         S.E.\n");
      for i = (nz + 1) : (nz + nx)
        printf ("   %8.4f     %8.4f\n", tb (i), se (i));
      endfor
    endif
  endif

  if (nargout == 6)
    if (nx > 0)
      e = ((x * beta) * ones (1, nz)) + ((y * 0 + 1) * theta');
    else
      e = (y * 0 + 1) * theta';
    endif
    ## FIX: the per-category conditional distribution of y given x was
    ## previously assigned to an unused local "gamma", so the declared
    ## sixth output p silently returned the raw cumulative-likelihood
    ## values from the last iteration instead.  Assign to p, as the
    ## texinfo documentation describes.
    p = diff ([(y * 0), (exp (e) ./ (1 + exp (e))), (y * 0 + 1)]')';
  endif

endfunction