This post is bound to contain mistakes; I would appreciate it if you pointed them out in the comments (please note that I frequently revise things I notice without further announcement).

Maximum Likelihood Estimation of the Simple Regression Model

The Simple Regression Model

$$ \begin{eqnarray} y_i&=&\alpha+\beta x_i+\epsilon_i \;(i=1,\cdots,n) \\&&\epsilon_i \overset{iid}{\sim} N(0,\sigma^2)\;\cdots\;\text{independent and identically distributed (IID, i.i.d., iid)} \end{eqnarray} $$
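As a quick numerical illustration (not part of the original derivation), the following Python sketch draws a sample from this model; the sample size and the values of alpha, beta, sigma are arbitrary assumptions chosen only for the example. The later sketches reuse these x and y.

import numpy as np

rng = np.random.default_rng(0)

# assumed example values, not taken from the post
n = 50
alpha_true, beta_true, sigma_true = 1.0, 2.0, 0.5

x = rng.uniform(0.0, 10.0, size=n)           # arbitrary design points x_i
eps = rng.normal(0.0, sigma_true, size=n)    # iid N(0, sigma^2) errors
y = alpha_true + beta_true * x + eps         # y_i = alpha + beta x_i + eps_i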

Log-Likelihood Function

The log-likelihood function is as follows. $$ \begin{eqnarray} f(y_1,\cdots,y_n;\alpha,\beta,\sigma^2)&=&\prod_{i=1}^{n}\frac{1}{\sqrt{2\pi\sigma^2}}e^{-\frac{1}{2\sigma^2}\left(y_i-\alpha-\beta x_i\right)^2} \\&=&\left(2\pi\right)^{-\frac{n}{2}} \left(\sigma^2\right)^{-\frac{n}{2}} e^{-\frac{1}{2\sigma^2}\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}} \\l(\alpha,\beta,\sigma^2;y_1,\cdots,y_n)&=&\log{\left\{ \left(2\pi\right)^{-\frac{n}{2}} \left(\sigma^2\right)^{-\frac{n}{2}} e^{-\frac{1}{2\sigma^2}\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}} \right\}} \\&=&\log{\left\{ \left(2\pi\right)^{-\frac{n}{2}} \right\}} +\log{\left\{ \left(\sigma^2\right)^{-\frac{n}{2}} \right\}} +\log{\left\{ e^{-\frac{1}{2\sigma^2}\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}} \right\}} \\&=&-\frac{n}{2}\log{\left(2\pi\right)} -\frac{n}{2}\log{\left(\sigma^2\right)} -\frac{1}{2\sigma^2} \sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2} \end{eqnarray} $$
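The final expression above can be transcribed directly into Python; a minimal sketch, assuming the arrays x and y from the simulation sketch above:

import numpy as np

def log_likelihood(alpha, beta, sigma2, x, y):
    # l(alpha, beta, sigma^2; y_1, ..., y_n) as derived above
    n = len(y)
    resid = y - alpha - beta * x
    return (-0.5 * n * np.log(2.0 * np.pi)
            - 0.5 * n * np.log(sigma2)
            - 0.5 * np.sum(resid ** 2) / sigma2)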

Score Functions

The score functions are as follows. $$ \begin{eqnarray} \frac{\partial l}{\partial \alpha} &=&\frac{\partial}{\partial \alpha}\left\{-\frac{1}{2\sigma^2} \sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}\right\} \\&=&-\frac{1}{2\sigma^2} \sum_{i=1}^{n}{\frac{\partial}{\partial \alpha}\left(y_i-\alpha-\beta x_i\right)^2} \\&=&-\frac{1}{2\sigma^2} \sum_{i=1}^{n}{2\left(y_i-\alpha-\beta x_i\right)(-1)} \\&=&-\frac{-2}{2\sigma^2} \sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)} \\&=&\frac{1}{\sigma^2} \left(\sum_{i=1}^{n}y_i-\alpha\sum_{i=1}^{n}1-\beta\sum_{i=1}^{n} x_i\right) \\&=&\frac{1}{\sigma^2} \left(n\bar{y}-n\alpha-n\beta\bar{x}\right) \;\cdots\;\bar{x}=\frac{1}{n}\sum_{i=1}^{n}x_i,\;\bar{y}=\frac{1}{n}\sum_{i=1}^{n}y_i \\&=&\frac{n}{\sigma^2} \left(\bar{y}-\alpha-\beta\bar{x}\right) \end{eqnarray} $$ $$ \begin{eqnarray} \frac{\partial l}{\partial \beta} &=&\frac{\partial}{\partial \beta}\left\{-\frac{1}{2\sigma^2} \sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}\right\} \\&=&-\frac{1}{2\sigma^2} \sum_{i=1}^{n}{\frac{\partial}{\partial \beta}\left(y_i-\alpha-\beta x_i\right)^2} \\&=&-\frac{1}{2\sigma^2} \sum_{i=1}^{n}{2\left(y_i-\alpha-\beta x_i\right)(-x_i)} \\&=&-\frac{-2}{2\sigma^2} \sum_{i=1}^{n}{\left(x_iy_i-x_i\alpha-\beta x_i^2\right)} \\&=&\frac{1}{\sigma^2} \left(\sum_{i=1}^{n}x_iy_i-\sum_{i=1}^{n}x_i\alpha-\sum_{i=1}^{n}\beta x_i^2\right) \\&=&\frac{1}{\sigma^2} \left(\sum_{i=1}^{n}x_iy_i-n\bar{x}\alpha-\beta \sum_{i=1}^{n}x_i^2\right) \;\cdots\;\bar{x}=\frac{1}{n}\sum_{i=1}^{n}x_i \end{eqnarray} $$ $$ \begin{eqnarray} \frac{\partial l}{\partial \sigma^2} &=&\frac{\partial}{\partial \sigma^2}\left\{ -\frac{n}{2}\log{\left(\sigma^2\right)} -\frac{1}{2\sigma^2} \sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2} \right\} \\&=& -\frac{n}{2}\frac{\partial}{\partial \sigma^2}\log{\left(\sigma^2\right)} -\frac{1}{2}\left\{\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}\right\}\frac{\partial}{\partial \sigma^2}\frac{1}{\sigma^2} \\&=& -\frac{n}{2}\frac{\partial}{\partial \sigma^2}\log{\left(\sigma^2\right)} -\frac{1}{2}\left\{\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}\right\}\frac{\partial}{\partial u}\frac{1}{u} \;\cdots\;u=\sigma^2 \\&=& -\frac{n}{2}\frac{\partial}{\partial \sigma^2}\log{\left(\sigma^2\right)} -\frac{1}{2}\left\{\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}\right\}\left(\frac{-1}{u^2}\right) \\&=& -\frac{n}{2}\frac{1}{\sigma^2} -\frac{1}{2}\left\{\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}\right\}\left(\frac{-1}{\sigma^4}\right) \;\cdots\;u=\sigma^2 \\&=& -\frac{1}{2\sigma^2}\left\{ n-\frac{1}{\sigma^2}\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2} \right\} \end{eqnarray} $$
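These derivatives can be checked numerically against log_likelihood by central finite differences; a sketch, assuming x, y and log_likelihood from the sketches above (the evaluation point theta is an arbitrary assumption):

import numpy as np

def scores(alpha, beta, sigma2, x, y):
    # analytic score functions derived above
    n = len(y)
    resid = y - alpha - beta * x
    d_alpha = np.sum(resid) / sigma2                        # = n/sigma^2 (ybar - alpha - beta xbar)
    d_beta = np.sum(x * resid) / sigma2
    d_sigma2 = -0.5 / sigma2 * (n - np.sum(resid ** 2) / sigma2)
    return np.array([d_alpha, d_beta, d_sigma2])

theta = np.array([0.8, 1.9, 0.3])                           # arbitrary evaluation point
h = 1e-6
numeric = np.array([
    (log_likelihood(*(theta + h * e), x, y) - log_likelihood(*(theta - h * e), x, y)) / (2 * h)
    for e in np.eye(3)
])
print(np.allclose(numeric, scores(*theta, x, y), rtol=1e-4))  # True if the derivation is right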

Setting the Score Functions to Zero

$$ \begin{eqnarray} \left\{\begin{array}{rcl} \;0&=&\frac{\partial l}{\partial \alpha} \\0&=&\frac{\partial l}{\partial \beta} \\0&=&\frac{\partial l}{\partial \sigma^2} \end{array}\right. \end{eqnarray} $$ $$ \begin{eqnarray} \left\{\begin{array}{rcl} \;0&=&\frac{n}{\sigma^2} \left(\bar{y}-\alpha-\beta\bar{x}\right) \\0&=&\frac{1}{\sigma^2} \left(\sum_{i=1}^{n}x_iy_i-n\bar{x}\alpha-\beta \sum_{i=1}^{n}x_i^2\right) \\0&=&-\frac{1}{2\sigma^2}\left\{n-\frac{1}{\sigma^2}\sum_{i=1}^{n}{\left(y_i-\alpha-\beta x_i\right)^2}\right\} \end{array}\right. \end{eqnarray} $$ Write \(\hat{\alpha},\hat{\beta},\hat{\sigma}^2\) for estimators of \(\alpha,\beta,\sigma^2\), and \(\hat{\alpha}_{ML},\hat{\beta}_{ML},\hat{\sigma}^2_{ML}\) for their maximum likelihood estimators. $$ \begin{eqnarray} \left\{\begin{array}{rcl} \;0&=&\bar{y}-\hat{\alpha}_{ML}-\hat{\beta}_{ML}\bar{x} \\0&=&\sum_{i=1}^{n}x_iy_i-n\bar{x}\hat{\alpha}_{ML}-\hat{\beta}_{ML} \sum_{i=1}^{n}x_i^2 \\0&=&n-\frac{1}{\hat{\sigma}^2_{ML}}\sum_{i=1}^{n}{\left(y_i-\hat{\alpha}_{ML}-\hat{\beta}_{ML} x_i\right)^2} \end{array}\right. \end{eqnarray} $$
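Before solving this system by hand, it can also be solved numerically; a sketch using scipy.optimize.fsolve on the scores function defined above (the starting point is an arbitrary assumption):

import numpy as np
from scipy.optimize import fsolve

root = fsolve(lambda t: scores(t[0], t[1], t[2], x, y), x0=np.array([0.0, 1.0, 1.0]))
alpha_hat_num, beta_hat_num, sigma2_hat_num = root
print(alpha_hat_num, beta_hat_num, sigma2_hat_num)  # should match the closed forms derived below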

Solving for \(\hat{\alpha}_{ML}\)

$$ \begin{eqnarray} 0&=&\bar{y}-\hat{\alpha}_{ML}-\hat{\beta}_{ML}\bar{x} \\\hat{\alpha}_{ML}&=&\bar{y}-\hat{\beta}_{ML}\bar{x} \end{eqnarray} $$

Solving for \(\hat{\beta}_{ML}\)

$$ \begin{eqnarray} 0&=&\sum_{i=1}^{n}x_iy_i-n\bar{x}\hat{\alpha}_{ML}-\hat{\beta}_{ML} \sum_{i=1}^{n}x_i^2 \\&=&\sum_{i=1}^{n}x_iy_i-n\bar{x}\left(\bar{y}-\hat{\beta}_{ML}\bar{x}\right)-\hat{\beta}_{ML} \sum_{i=1}^{n}x_i^2 \;\cdots\;\hat{\alpha}_{ML}=\bar{y}-\hat{\beta}_{ML}\bar{x} \\&=&\sum_{i=1}^{n}x_iy_i-n\bar{x}\bar{y}+n\hat{\beta}_{ML}\bar{x}^2-\hat{\beta}_{ML} \sum_{i=1}^{n}x_i^2 \\&=&\sum_{i=1}^{n}x_iy_i-n\bar{x}\bar{y} -\hat{\beta}_{ML} \left\{ \left( \sum_{i=1}^{n}x_i^2 \right) - n\bar{x}^2\right\} \\&=&\sum_{i=1}^{n}\left(x_i-\bar{x}\right)\left(y_i-\bar{y}\right) -\hat{\beta}_{ML} \sum_{i=1}^{n}\left(x_i-\bar{x}\right)^2 \\&=&S_{xy} -\hat{\beta}_{ML}\;S_{xx} \;\cdots\;S_{xy}=\sum_{i=1}^{n}(x_i-\bar{x})(y_i-\bar{y}),\;S_{xx}=\sum_{i=1}^{n}(x_i-\bar{x})^2 \\\hat{\beta}_{ML}&=&\frac{S_{xy}}{S_{xx}} \\&=&\hat{\beta}\;\cdots\;\href{https://shikitenkai.blogspot.com/2020/03/blog-post.html}{\text{same as the solution of the normal equations}} \\\hat{\alpha}_{ML}&=&\bar{y}-\hat{\beta}_{ML}\bar{x} \\&=&\bar{y}-\hat{\beta}\bar{x} \\&=&\hat{\alpha}\;\cdots\;\href{https://shikitenkai.blogspot.com/2020/03/blog-post.html}{\text{same as the solution of the normal equations}} \end{eqnarray} $$
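The closed forms \(\hat{\beta}_{ML}=S_{xy}/S_{xx}\) and \(\hat{\alpha}_{ML}=\bar{y}-\hat{\beta}_{ML}\bar{x}\) are easy to compute and compare with an off-the-shelf least-squares fit; a sketch, assuming x and y from the simulation sketch above:

import numpy as np

xbar, ybar = x.mean(), y.mean()
S_xy = np.sum((x - xbar) * (y - ybar))
S_xx = np.sum((x - xbar) ** 2)

beta_hat = S_xy / S_xx
alpha_hat = ybar - beta_hat * xbar

# np.polyfit solves the same normal equations (returns slope first, then intercept)
b_ls, a_ls = np.polyfit(x, y, deg=1)
print(np.allclose([alpha_hat, beta_hat], [a_ls, b_ls]))  # True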

Solving for \(\hat{\sigma}^2_{ML}\)

$$ \begin{eqnarray} 0&=&n-\frac{1}{\hat{\sigma}^2_{ML}}\sum_{i=1}^{n}{\left(y_i-\hat{\alpha}_{ML}-\hat{\beta}_{ML} x_i\right)^2} \\-n&=&-\frac{1}{\hat{\sigma}^2_{ML}}\sum_{i=1}^{n}{\left(y_i-\hat{\alpha}_{ML}-\hat{\beta}_{ML} x_i\right)^2} \\-n\hat{\sigma}^2_{ML}&=&-\sum_{i=1}^{n}{\left(y_i-\hat{\alpha}_{ML}-\hat{\beta}_{ML} x_i\right)^2} \\\hat{\sigma}^2_{ML} &=&\frac{1}{n}\sum_{i=1}^{n}{\left(y_i-\hat{\alpha}_{ML}-\hat{\beta}_{ML} x_i\right)^2} \\&=&\frac{1}{n}\sum_{i=1}^{n}{\left(y_i-\hat{\alpha}-\hat{\beta} x_i\right)^2} \\&=&\frac{1}{n}\sum_{i=1}^{n}{\left(y_i-\hat{y}_i\right)^2} \;\cdots\;\hat{y}_i=\hat{\alpha}+\hat{\beta} x_i \\&=&\frac{1}{n}\sum_{i=1}^{n}{e_i^2} \;\cdots\;e_i=y_i-\hat{y}_i \\&=&\frac{1}{n}(n-2)s^2 \;\cdots\;\href{https://shikitenkai.blogspot.com/2020/09/blog-post.html}{\sum_{i=1}^{n}{e_i^2}=(n-2)s^2,\;s^2=\frac{1}{n-2}\sum_{i=1}^{n}{e_i^2}} \\&=&\frac{n-2}{n}s^2 \end{eqnarray} $$
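The relation between \(\hat{\sigma}^2_{ML}\) and the unbiased estimator \(s^2\) can be confirmed numerically as well; a sketch, assuming alpha_hat, beta_hat, x and y from the sketches above:

import numpy as np

resid = y - alpha_hat - beta_hat * x            # e_i
sigma2_ml = np.mean(resid ** 2)                 # (1/n) sum e_i^2
s2 = np.sum(resid ** 2) / (len(y) - 2)          # s^2 = (1/(n-2)) sum e_i^2
print(np.allclose(sigma2_ml, (len(y) - 2) / len(y) * s2))  # True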

The Maximum Likelihood Estimators \(\hat{\alpha}_{ML},\hat{\beta}_{ML},\hat{\sigma}^2_{ML}\)

From the above, the maximum likelihood estimators \(\hat{\alpha}_{ML},\hat{\beta}_{ML},\hat{\sigma}^2_{ML}\) are as follows. $$ \begin{eqnarray} \hat{\beta}_{ML}&=&\frac{S_{xy}}{S_{xx}}=\hat{\beta} \\\hat{\alpha}_{ML}&=&\bar{y}-\hat{\beta}_{ML}\bar{x}=\bar{y}-\hat{\beta}\bar{x}=\hat{\alpha} \\\hat{\sigma}_{ML}^2&=&\frac{n-2}{n}s^2 \end{eqnarray} $$
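As a final check, maximizing the log-likelihood directly should reproduce these closed forms; a sketch using scipy.optimize.minimize on -l, assuming log_likelihood, x, y, alpha_hat, beta_hat and sigma2_ml from the sketches above (the starting point and method are arbitrary assumptions):

import numpy as np
from scipy.optimize import minimize

res = minimize(lambda t: -log_likelihood(t[0], t[1], t[2], x, y),
               x0=np.array([0.0, 1.0, 1.0]), method="Nelder-Mead")
print(res.x)                                    # approximately [alpha_hat, beta_hat, sigma2_ml]
print(alpha_hat, beta_hat, sigma2_ml)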
