Linear Regression Model - Boston Housing Data

1. Importing the required modules and data

In [126]:
from sklearn.datasets import load_boston
from pandas import DataFrame
import numpy as np
In [127]:
# Load the Boston dataset into an object named "boston"
boston = load_boston()
/usr/local/lib/python3.7/dist-packages/sklearn/utils/deprecation.py:87: FutureWarning: Function load_boston is deprecated; `load_boston` is deprecated in 1.0 and will be removed in 1.2.

    The Boston housing prices dataset has an ethical problem. You can refer to
    the documentation of this function for further details.

    The scikit-learn maintainers therefore strongly discourage the use of this
    dataset unless the purpose of the code is to study and educate about
    ethical issues in data science and machine learning.

    In this special case, you can fetch the dataset from the original
    source::

        import pandas as pd
        import numpy as np


        data_url = "http://lib.stat.cmu.edu/datasets/boston"
        raw_df = pd.read_csv(data_url, sep="\s+", skiprows=22, header=None)
        data = np.hstack([raw_df.values[::2, :], raw_df.values[1::2, :2]])
        target = raw_df.values[1::2, 2]

    Alternative datasets include the California housing dataset (i.e.
    :func:`~sklearn.datasets.fetch_california_housing`) and the Ames housing
    dataset. You can load the datasets as follows::

        from sklearn.datasets import fetch_california_housing
        housing = fetch_california_housing()

    for the California housing dataset and::

        from sklearn.datasets import fetch_openml
        housing = fetch_openml(name="house_prices", as_frame=True)

    for the Ames housing dataset.
    
  warnings.warn(msg, category=FutureWarning)

A warning is shown, but the data still seems to load. It says load_boston is deprecated and that the original data should be fetched from the URL above if needed. Since nothing seems broken for now, let's proceed.
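
For reference, the warning itself shows how to rebuild the same data without load_boston. A minimal sketch following that snippet, in case a newer scikit-learn has already removed the function (the column list is copied from feature_names, which is printed further below):

import pandas as pd
import numpy as np

# Fetch the raw StatLib file recommended by the deprecation warning.
data_url = "http://lib.stat.cmu.edu/datasets/boston"
raw_df = pd.read_csv(data_url, sep=r"\s+", skiprows=22, header=None)

# Each record is split across two physical lines in the raw file.
data = np.hstack([raw_df.values[::2, :], raw_df.values[1::2, :2]])
target = raw_df.values[1::2, 2]

# Same column order as boston.feature_names below.
feature_names = ['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE',
                 'DIS', 'RAD', 'TAX', 'PTRATIO', 'B', 'LSTAT']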

In [128]:
# Inspect the imported data
print(boston)
{'data': array([[6.3200e-03, 1.8000e+01, 2.3100e+00, ..., 1.5300e+01, 3.9690e+02,
        4.9800e+00],
       [2.7310e-02, 0.0000e+00, 7.0700e+00, ..., 1.7800e+01, 3.9690e+02,
        9.1400e+00],
       [2.7290e-02, 0.0000e+00, 7.0700e+00, ..., 1.7800e+01, 3.9283e+02,
        4.0300e+00],
       ...,
       [6.0760e-02, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9690e+02,
        5.6400e+00],
       [1.0959e-01, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9345e+02,
        6.4800e+00],
       [4.7410e-02, 0.0000e+00, 1.1930e+01, ..., 2.1000e+01, 3.9690e+02,
        7.8800e+00]]), 'target': array([24. , 21.6, 34.7, 33.4, 36.2, 28.7, 22.9, 27.1, 16.5, 18.9, 15. ,
       18.9, 21.7, 20.4, 18.2, 19.9, 23.1, 17.5, 20.2, 18.2, 13.6, 19.6,
       15.2, 14.5, 15.6, 13.9, 16.6, 14.8, 18.4, 21. , 12.7, 14.5, 13.2,
       13.1, 13.5, 18.9, 20. , 21. , 24.7, 30.8, 34.9, 26.6, 25.3, 24.7,
       21.2, 19.3, 20. , 16.6, 14.4, 19.4, 19.7, 20.5, 25. , 23.4, 18.9,
       35.4, 24.7, 31.6, 23.3, 19.6, 18.7, 16. , 22.2, 25. , 33. , 23.5,
       19.4, 22. , 17.4, 20.9, 24.2, 21.7, 22.8, 23.4, 24.1, 21.4, 20. ,
       20.8, 21.2, 20.3, 28. , 23.9, 24.8, 22.9, 23.9, 26.6, 22.5, 22.2,
       23.6, 28.7, 22.6, 22. , 22.9, 25. , 20.6, 28.4, 21.4, 38.7, 43.8,
       33.2, 27.5, 26.5, 18.6, 19.3, 20.1, 19.5, 19.5, 20.4, 19.8, 19.4,
       21.7, 22.8, 18.8, 18.7, 18.5, 18.3, 21.2, 19.2, 20.4, 19.3, 22. ,
       20.3, 20.5, 17.3, 18.8, 21.4, 15.7, 16.2, 18. , 14.3, 19.2, 19.6,
       23. , 18.4, 15.6, 18.1, 17.4, 17.1, 13.3, 17.8, 14. , 14.4, 13.4,
       15.6, 11.8, 13.8, 15.6, 14.6, 17.8, 15.4, 21.5, 19.6, 15.3, 19.4,
       17. , 15.6, 13.1, 41.3, 24.3, 23.3, 27. , 50. , 50. , 50. , 22.7,
       25. , 50. , 23.8, 23.8, 22.3, 17.4, 19.1, 23.1, 23.6, 22.6, 29.4,
       23.2, 24.6, 29.9, 37.2, 39.8, 36.2, 37.9, 32.5, 26.4, 29.6, 50. ,
       32. , 29.8, 34.9, 37. , 30.5, 36.4, 31.1, 29.1, 50. , 33.3, 30.3,
       34.6, 34.9, 32.9, 24.1, 42.3, 48.5, 50. , 22.6, 24.4, 22.5, 24.4,
       20. , 21.7, 19.3, 22.4, 28.1, 23.7, 25. , 23.3, 28.7, 21.5, 23. ,
       26.7, 21.7, 27.5, 30.1, 44.8, 50. , 37.6, 31.6, 46.7, 31.5, 24.3,
       31.7, 41.7, 48.3, 29. , 24. , 25.1, 31.5, 23.7, 23.3, 22. , 20.1,
       22.2, 23.7, 17.6, 18.5, 24.3, 20.5, 24.5, 26.2, 24.4, 24.8, 29.6,
       42.8, 21.9, 20.9, 44. , 50. , 36. , 30.1, 33.8, 43.1, 48.8, 31. ,
       36.5, 22.8, 30.7, 50. , 43.5, 20.7, 21.1, 25.2, 24.4, 35.2, 32.4,
       32. , 33.2, 33.1, 29.1, 35.1, 45.4, 35.4, 46. , 50. , 32.2, 22. ,
       20.1, 23.2, 22.3, 24.8, 28.5, 37.3, 27.9, 23.9, 21.7, 28.6, 27.1,
       20.3, 22.5, 29. , 24.8, 22. , 26.4, 33.1, 36.1, 28.4, 33.4, 28.2,
       22.8, 20.3, 16.1, 22.1, 19.4, 21.6, 23.8, 16.2, 17.8, 19.8, 23.1,
       21. , 23.8, 23.1, 20.4, 18.5, 25. , 24.6, 23. , 22.2, 19.3, 22.6,
       19.8, 17.1, 19.4, 22.2, 20.7, 21.1, 19.5, 18.5, 20.6, 19. , 18.7,
       32.7, 16.5, 23.9, 31.2, 17.5, 17.2, 23.1, 24.5, 26.6, 22.9, 24.1,
       18.6, 30.1, 18.2, 20.6, 17.8, 21.7, 22.7, 22.6, 25. , 19.9, 20.8,
       16.8, 21.9, 27.5, 21.9, 23.1, 50. , 50. , 50. , 50. , 50. , 13.8,
       13.8, 15. , 13.9, 13.3, 13.1, 10.2, 10.4, 10.9, 11.3, 12.3,  8.8,
        7.2, 10.5,  7.4, 10.2, 11.5, 15.1, 23.2,  9.7, 13.8, 12.7, 13.1,
       12.5,  8.5,  5. ,  6.3,  5.6,  7.2, 12.1,  8.3,  8.5,  5. , 11.9,
       27.9, 17.2, 27.5, 15. , 17.2, 17.9, 16.3,  7. ,  7.2,  7.5, 10.4,
        8.8,  8.4, 16.7, 14.2, 20.8, 13.4, 11.7,  8.3, 10.2, 10.9, 11. ,
        9.5, 14.5, 14.1, 16.1, 14.3, 11.7, 13.4,  9.6,  8.7,  8.4, 12.8,
       10.5, 17.1, 18.4, 15.4, 10.8, 11.8, 14.9, 12.6, 14.1, 13. , 13.4,
       15.2, 16.1, 17.8, 14.9, 14.1, 12.7, 13.5, 14.9, 20. , 16.4, 17.7,
       19.5, 20.2, 21.4, 19.9, 19. , 19.1, 19.1, 20.1, 19.9, 19.6, 23.2,
       29.8, 13.8, 13.3, 16.7, 12. , 14.6, 21.4, 23. , 23.7, 25. , 21.8,
       20.6, 21.2, 19.1, 20.6, 15.2,  7. ,  8.1, 13.6, 20.1, 21.8, 24.5,
       23.1, 19.7, 18.3, 21.2, 17.5, 16.8, 22.4, 20.6, 23.9, 22. , 11.9]), 'feature_names': array(['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD',
       'TAX', 'PTRATIO', 'B', 'LSTAT'], dtype='<U7'), 'DESCR': ".. _boston_dataset:\n\nBoston house prices dataset\n---------------------------\n\n**Data Set Characteristics:**  \n\n    :Number of Instances: 506 \n\n    :Number of Attributes: 13 numeric/categorical predictive. Median Value (attribute 14) is usually the target.\n\n    :Attribute Information (in order):\n        - CRIM     per capita crime rate by town\n        - ZN       proportion of residential land zoned for lots over 25,000 sq.ft.\n        - INDUS    proportion of non-retail business acres per town\n        - CHAS     Charles River dummy variable (= 1 if tract bounds river; 0 otherwise)\n        - NOX      nitric oxides concentration (parts per 10 million)\n        - RM       average number of rooms per dwelling\n        - AGE      proportion of owner-occupied units built prior to 1940\n        - DIS      weighted distances to five Boston employment centres\n        - RAD      index of accessibility to radial highways\n        - TAX      full-value property-tax rate per $10,000\n        - PTRATIO  pupil-teacher ratio by town\n        - B        1000(Bk - 0.63)^2 where Bk is the proportion of black people by town\n        - LSTAT    % lower status of the population\n        - MEDV     Median value of owner-occupied homes in $1000's\n\n    :Missing Attribute Values: None\n\n    :Creator: Harrison, D. and Rubinfeld, D.L.\n\nThis is a copy of UCI ML housing dataset.\nhttps://archive.ics.uci.edu/ml/machine-learning-databases/housing/\n\n\nThis dataset was taken from the StatLib library which is maintained at Carnegie Mellon University.\n\nThe Boston house-price data of Harrison, D. and Rubinfeld, D.L. 'Hedonic\nprices and the demand for clean air', J. Environ. Economics & Management,\nvol.5, 81-102, 1978.   Used in Belsley, Kuh & Welsch, 'Regression diagnostics\n...', Wiley, 1980.   N.B. Various transformations are used in the table on\npages 244-261 of the latter.\n\nThe Boston house-price data has been used in many machine learning papers that address regression\nproblems.   \n     \n.. topic:: References\n\n   - Belsley, Kuh & Welsch, 'Regression diagnostics: Identifying Influential Data and Sources of Collinearity', Wiley, 1980. 244-261.\n   - Quinlan,R. (1993). Combining Instance-Based and Model-Based Learning. In Proceedings on the Tenth International Conference of Machine Learning, 236-243, University of Massachusetts, Amherst. Morgan Kaufmann.\n", 'filename': 'boston_house_prices.csv', 'data_module': 'sklearn.datasets.data'}
In [129]:
# Check the contents of the DESCR attribute
# This prints the dataset description
print(boston['DESCR'])
.. _boston_dataset:

Boston house prices dataset
---------------------------

**Data Set Characteristics:**  

    :Number of Instances: 506 

    :Number of Attributes: 13 numeric/categorical predictive. Median Value (attribute 14) is usually the target.

    :Attribute Information (in order):
        - CRIM     per capita crime rate by town
        - ZN       proportion of residential land zoned for lots over 25,000 sq.ft.
        - INDUS    proportion of non-retail business acres per town
        - CHAS     Charles River dummy variable (= 1 if tract bounds river; 0 otherwise)
        - NOX      nitric oxides concentration (parts per 10 million)
        - RM       average number of rooms per dwelling
        - AGE      proportion of owner-occupied units built prior to 1940
        - DIS      weighted distances to five Boston employment centres
        - RAD      index of accessibility to radial highways
        - TAX      full-value property-tax rate per $10,000
        - PTRATIO  pupil-teacher ratio by town
        - B        1000(Bk - 0.63)^2 where Bk is the proportion of black people by town
        - LSTAT    % lower status of the population
        - MEDV     Median value of owner-occupied homes in $1000's

    :Missing Attribute Values: None

    :Creator: Harrison, D. and Rubinfeld, D.L.

This is a copy of UCI ML housing dataset.
https://archive.ics.uci.edu/ml/machine-learning-databases/housing/


This dataset was taken from the StatLib library which is maintained at Carnegie Mellon University.

The Boston house-price data of Harrison, D. and Rubinfeld, D.L. 'Hedonic
prices and the demand for clean air', J. Environ. Economics & Management,
vol.5, 81-102, 1978.   Used in Belsley, Kuh & Welsch, 'Regression diagnostics
...', Wiley, 1980.   N.B. Various transformations are used in the table on
pages 244-261 of the latter.

The Boston house-price data has been used in many machine learning papers that address regression
problems.   
     
.. topic:: References

   - Belsley, Kuh & Welsch, 'Regression diagnostics: Identifying Influential Data and Sources of Collinearity', Wiley, 1980. 244-261.
   - Quinlan,R. (1993). Combining Instance-Based and Model-Based Learning. In Proceedings on the Tenth International Conference of Machine Learning, 236-243, University of Massachusetts, Amherst. Morgan Kaufmann.

In [130]:
# Check the contents of feature_names
# These are the column names
print(boston['feature_names'])
['CRIM' 'ZN' 'INDUS' 'CHAS' 'NOX' 'RM' 'AGE' 'DIS' 'RAD' 'TAX' 'PTRATIO'
 'B' 'LSTAT']
In [131]:
# Check the contents of data (the explanatory variables)
print(boston['data'])
[[6.3200e-03 1.8000e+01 2.3100e+00 ... 1.5300e+01 3.9690e+02 4.9800e+00]
 [2.7310e-02 0.0000e+00 7.0700e+00 ... 1.7800e+01 3.9690e+02 9.1400e+00]
 [2.7290e-02 0.0000e+00 7.0700e+00 ... 1.7800e+01 3.9283e+02 4.0300e+00]
 ...
 [6.0760e-02 0.0000e+00 1.1930e+01 ... 2.1000e+01 3.9690e+02 5.6400e+00]
 [1.0959e-01 0.0000e+00 1.1930e+01 ... 2.1000e+01 3.9345e+02 6.4800e+00]
 [4.7410e-02 0.0000e+00 1.1930e+01 ... 2.1000e+01 3.9690e+02 7.8800e+00]]
In [132]:
# Check the contents of target (the objective variable)
# If there are obviously odd values, preprocessing is needed:
#   plot a histogram and check the max/min
#   the values may also be capped at an upper limit
#   (see the sketch after the output below)
print(boston['target'])
[24.  21.6 34.7 33.4 36.2 28.7 22.9 27.1 16.5 18.9 15.  18.9 21.7 20.4
 18.2 19.9 23.1 17.5 20.2 18.2 13.6 19.6 15.2 14.5 15.6 13.9 16.6 14.8
 18.4 21.  12.7 14.5 13.2 13.1 13.5 18.9 20.  21.  24.7 30.8 34.9 26.6
 25.3 24.7 21.2 19.3 20.  16.6 14.4 19.4 19.7 20.5 25.  23.4 18.9 35.4
 24.7 31.6 23.3 19.6 18.7 16.  22.2 25.  33.  23.5 19.4 22.  17.4 20.9
 24.2 21.7 22.8 23.4 24.1 21.4 20.  20.8 21.2 20.3 28.  23.9 24.8 22.9
 23.9 26.6 22.5 22.2 23.6 28.7 22.6 22.  22.9 25.  20.6 28.4 21.4 38.7
 43.8 33.2 27.5 26.5 18.6 19.3 20.1 19.5 19.5 20.4 19.8 19.4 21.7 22.8
 18.8 18.7 18.5 18.3 21.2 19.2 20.4 19.3 22.  20.3 20.5 17.3 18.8 21.4
 15.7 16.2 18.  14.3 19.2 19.6 23.  18.4 15.6 18.1 17.4 17.1 13.3 17.8
 14.  14.4 13.4 15.6 11.8 13.8 15.6 14.6 17.8 15.4 21.5 19.6 15.3 19.4
 17.  15.6 13.1 41.3 24.3 23.3 27.  50.  50.  50.  22.7 25.  50.  23.8
 23.8 22.3 17.4 19.1 23.1 23.6 22.6 29.4 23.2 24.6 29.9 37.2 39.8 36.2
 37.9 32.5 26.4 29.6 50.  32.  29.8 34.9 37.  30.5 36.4 31.1 29.1 50.
 33.3 30.3 34.6 34.9 32.9 24.1 42.3 48.5 50.  22.6 24.4 22.5 24.4 20.
 21.7 19.3 22.4 28.1 23.7 25.  23.3 28.7 21.5 23.  26.7 21.7 27.5 30.1
 44.8 50.  37.6 31.6 46.7 31.5 24.3 31.7 41.7 48.3 29.  24.  25.1 31.5
 23.7 23.3 22.  20.1 22.2 23.7 17.6 18.5 24.3 20.5 24.5 26.2 24.4 24.8
 29.6 42.8 21.9 20.9 44.  50.  36.  30.1 33.8 43.1 48.8 31.  36.5 22.8
 30.7 50.  43.5 20.7 21.1 25.2 24.4 35.2 32.4 32.  33.2 33.1 29.1 35.1
 45.4 35.4 46.  50.  32.2 22.  20.1 23.2 22.3 24.8 28.5 37.3 27.9 23.9
 21.7 28.6 27.1 20.3 22.5 29.  24.8 22.  26.4 33.1 36.1 28.4 33.4 28.2
 22.8 20.3 16.1 22.1 19.4 21.6 23.8 16.2 17.8 19.8 23.1 21.  23.8 23.1
 20.4 18.5 25.  24.6 23.  22.2 19.3 22.6 19.8 17.1 19.4 22.2 20.7 21.1
 19.5 18.5 20.6 19.  18.7 32.7 16.5 23.9 31.2 17.5 17.2 23.1 24.5 26.6
 22.9 24.1 18.6 30.1 18.2 20.6 17.8 21.7 22.7 22.6 25.  19.9 20.8 16.8
 21.9 27.5 21.9 23.1 50.  50.  50.  50.  50.  13.8 13.8 15.  13.9 13.3
 13.1 10.2 10.4 10.9 11.3 12.3  8.8  7.2 10.5  7.4 10.2 11.5 15.1 23.2
  9.7 13.8 12.7 13.1 12.5  8.5  5.   6.3  5.6  7.2 12.1  8.3  8.5  5.
 11.9 27.9 17.2 27.5 15.  17.2 17.9 16.3  7.   7.2  7.5 10.4  8.8  8.4
 16.7 14.2 20.8 13.4 11.7  8.3 10.2 10.9 11.   9.5 14.5 14.1 16.1 14.3
 11.7 13.4  9.6  8.7  8.4 12.8 10.5 17.1 18.4 15.4 10.8 11.8 14.9 12.6
 14.1 13.  13.4 15.2 16.1 17.8 14.9 14.1 12.7 13.5 14.9 20.  16.4 17.7
 19.5 20.2 21.4 19.9 19.  19.1 19.1 20.1 19.9 19.6 23.2 29.8 13.8 13.3
 16.7 12.  14.6 21.4 23.  23.7 25.  21.8 20.6 21.2 19.1 20.6 15.2  7.
  8.1 13.6 20.1 21.8 24.5 23.1 19.7 18.3 21.2 17.5 16.8 22.4 20.6 23.9
 22.  11.9]
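
As the comments above suggest, the quickest sanity checks on the target are the min/max and a histogram. A minimal sketch (it assumes matplotlib, which is only imported later in this notebook):

import matplotlib.pyplot as plt

# Range check: the maximum is exactly 50.0 and appears many times,
# which suggests the target was capped at $50k.
print(boston['target'].min(), boston['target'].max())

# Distribution check: a spike in the rightmost bin would confirm the cap.
plt.hist(boston['target'], bins=30)
plt.xlabel('MEDV ($1000s)')
plt.ylabel('count')
plt.show()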

2. Creating the DataFrame

In [133]:
# Convert the explanatory variables into a DataFrame
df = DataFrame(data=boston.data, columns=boston.feature_names)
In [134]:
# Add the objective variable to the DataFrame
df['PRICE'] = np.array(boston.target)
In [135]:
# Display the first 12 rows
# If anything looks skewed at a glance, increase the number of rows displayed
# (or check summary statistics; see the sketch after this table)
df.head(12)
Out[135]:
CRIM ZN INDUS CHAS NOX RM AGE DIS RAD TAX PTRATIO B LSTAT PRICE
0 0.00632 18.0 2.31 0.0 0.538 6.575 65.2 4.0900 1.0 296.0 15.3 396.90 4.98 24.0
1 0.02731 0.0 7.07 0.0 0.469 6.421 78.9 4.9671 2.0 242.0 17.8 396.90 9.14 21.6
2 0.02729 0.0 7.07 0.0 0.469 7.185 61.1 4.9671 2.0 242.0 17.8 392.83 4.03 34.7
3 0.03237 0.0 2.18 0.0 0.458 6.998 45.8 6.0622 3.0 222.0 18.7 394.63 2.94 33.4
4 0.06905 0.0 2.18 0.0 0.458 7.147 54.2 6.0622 3.0 222.0 18.7 396.90 5.33 36.2
5 0.02985 0.0 2.18 0.0 0.458 6.430 58.7 6.0622 3.0 222.0 18.7 394.12 5.21 28.7
6 0.08829 12.5 7.87 0.0 0.524 6.012 66.6 5.5605 5.0 311.0 15.2 395.60 12.43 22.9
7 0.14455 12.5 7.87 0.0 0.524 6.172 96.1 5.9505 5.0 311.0 15.2 396.90 19.15 27.1
8 0.21124 12.5 7.87 0.0 0.524 5.631 100.0 6.0821 5.0 311.0 15.2 386.63 29.93 16.5
9 0.17004 12.5 7.87 0.0 0.524 6.004 85.9 6.5921 5.0 311.0 15.2 386.71 17.10 18.9
10 0.22489 12.5 7.87 0.0 0.524 6.377 94.3 6.3467 5.0 311.0 15.2 392.52 20.45 15.0
11 0.11747 12.5 7.87 0.0 0.524 6.009 82.9 6.2267 5.0 311.0 15.2 396.90 13.27 18.9
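
Rather than printing more rows, summary statistics make skew, caps, or missing values easier to spot. A minimal sketch:

# Count / mean / std / min / quartiles / max for every column in one call.
print(df.describe())

# DESCR says there are no missing values; this confirms it.
print(df.isnull().sum())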

Simple linear regression (one explanatory variable)

In [136]:
# Display a single selected column
# Run the regression on the number of rooms (RM) only
df[['RM']].head(12)
Out[136]:
RM
0 6.575
1 6.421
2 7.185
3 6.998
4 7.147
5 6.430
6 6.012
7 6.172
8 5.631
9 6.004
10 6.377
11 6.009
In [137]:
# Explanatory variable
data = df.loc[:,['RM']].values
In [138]:
# Display the data array (first 5 rows)
data[0:5]
Out[138]:
array([[6.575],
       [6.421],
       [7.185],
       [6.998],
       [7.147]])
In [139]:
# Objective variable
target = df.loc[:,'PRICE'].values
In [140]:
target[0:5]
Out[140]:
array([24. , 21.6, 34.7, 33.4, 36.2])
In [141]:
# Import LinearRegression from sklearn
from sklearn.linear_model import LinearRegression
In [142]:
# Create the model object
model = LinearRegression()
model.get_params()
Out[142]:
{'copy_X': True,
 'fit_intercept': True,
 'n_jobs': None,
 'normalize': 'deprecated',
 'positive': False}
In [143]:
# Estimate the parameters with fit() (a least-squares check follows this cell)
model.fit(data,target)
Out[143]:
LinearRegression()
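
fit() estimates the slope and intercept by ordinary least squares, so the same numbers can be reproduced with a direct least-squares solve. A minimal sketch:

# Design matrix [RM, 1] so the second coefficient is the intercept.
X = np.hstack([data, np.ones_like(data)])
coef, intercept = np.linalg.lstsq(X, target, rcond=None)[0]

# These should match the parameters estimated by fit().
print(coef, intercept)
print(model.coef_[0], model.intercept_)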
In [144]:
# Predict (here for a hypothetical house with RM = 1)
model.predict([[1]])
Out[144]:
array([-25.5685118])

A negative price cannot happen ⇒ most likely the model is set up incorrectly (or is being used far outside the range of the data).
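
To see where the negative value comes from, it helps to look at the fitted line and at the range of RM actually present in the data. A minimal sketch (the numbers in the comments are approximate):

# Fitted line: PRICE = coef * RM + intercept (roughly 9.1 * RM - 34.7 here).
print(model.coef_, model.intercept_)

# RM in the data spans roughly 3.6 to 8.8, so RM = 1 is a large extrapolation
# below the observed range, which is why the line dips below zero.
print(df['RM'].min(), df['RM'].max())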

Multiple regression (two variables)

In [145]:
# Display the selected columns
In [146]:
df[['CRIM','RM']].head()
Out[146]:
CRIM RM
0 0.00632 6.575
1 0.02731 6.421
2 0.02729 7.185
3 0.03237 6.998
4 0.06905 7.147
In [147]:
# Explanatory variables
data2 = df.loc[:,['CRIM','RM']].values
# Objective variable
target2 = df.loc[:,'PRICE'].values
In [148]:
data2
Out[148]:
array([[6.3200e-03, 6.5750e+00],
       [2.7310e-02, 6.4210e+00],
       [2.7290e-02, 7.1850e+00],
       ...,
       [6.0760e-02, 6.9760e+00],
       [1.0959e-01, 6.7940e+00],
       [4.7410e-02, 6.0300e+00]])
In [149]:
# Create the model object
model2 = LinearRegression()
In [150]:
# Estimate the parameters with fit()
model2.fit(data2, target2)
Out[150]:
LinearRegression()
In [151]:
# Predict (crime rate 0.2, 7 rooms)
model2.predict([[0.2,7]])
Out[151]:
array([29.43977562])

Checking the regression coefficients and the intercept

In [152]:
# Check the coefficients and intercept of the multiple regression
print(model2.coef_)
print(model2.intercept_)
[-0.26491325  8.39106825]
-29.24471945192992
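
These numbers say the fitted plane is PRICE ≈ -0.265 * CRIM + 8.391 * RM - 29.245, so the earlier prediction can be verified by hand. A minimal sketch:

# Reproduce model2.predict([[0.2, 7]]) directly from the coefficients.
crim, rm = 0.2, 7
price = model2.coef_[0] * crim + model2.coef_[1] * rm + model2.intercept_
print(price)  # ≈ 29.44, matching the predict() output above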

Exercise: predict the price of a property with 4 rooms and a crime rate of 0.3

In [153]:
model2.predict([[0.3,4]])
Out[153]:
array([4.24007956])

Visualizing the fitted model as a heatmap

The number of rooms has a far larger effect on the price than the crime rate.
(Though I realized after doing this: with a linear model you can tell that from the coefficients alone, without plotting.)

In [154]:
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # imported but not actually used in this cell
%matplotlib inline

# Input grid: crime rate 0.0-0.9 (step 0.1) and room count 1-10
crime = np.arange(0.0, 1.0, 0.1)
room = np.arange(1, 11, 1)
mesh_c, mesh_r = np.meshgrid(crime, room)
mesh_c = mesh_c.flatten()
mesh_r = mesh_r.flatten()
# Stack into the (n_samples, 2) shape expected by predict(): columns [CRIM, RM]
mesh_x = np.stack([mesh_c, mesh_r], 1)

# Predict at every grid point and reshape back to 10x10 (rows = rooms, columns = crime rate)
mesh_y = model2.predict(mesh_x).reshape(10, 10)

# Draw the predictions as a heatmap
fig, ax = plt.subplots()
im = ax.imshow(mesh_y)
fig.colorbar(im, ax=ax)
plt.show()
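
With the default settings, imshow labels the axes with array indices (0-9), which is hard to read. A minimal sketch of the same heatmap with the actual crime-rate and room values on the axes (the extent, origin, and label choices are my own):

fig, ax = plt.subplots()
# extent maps the array onto data coordinates; origin='lower' puts room = 1 at the bottom.
im = ax.imshow(mesh_y, extent=[0.0, 0.9, 1, 10], origin='lower', aspect='auto')
ax.set_xlabel('CRIM (crime rate)')
ax.set_ylabel('RM (number of rooms)')
fig.colorbar(im, ax=ax, label='predicted PRICE ($1000s)')
plt.show()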
In [155]:
mesh_r
Out[155]:
array([ 1,  1,  1,  1,  1,  1,  1,  1,  1,  1,  2,  2,  2,  2,  2,  2,  2,
        2,  2,  2,  3,  3,  3,  3,  3,  3,  3,  3,  3,  3,  4,  4,  4,  4,
        4,  4,  4,  4,  4,  4,  5,  5,  5,  5,  5,  5,  5,  5,  5,  5,  6,
        6,  6,  6,  6,  6,  6,  6,  6,  6,  7,  7,  7,  7,  7,  7,  7,  7,
        7,  7,  8,  8,  8,  8,  8,  8,  8,  8,  8,  8,  9,  9,  9,  9,  9,
        9,  9,  9,  9,  9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10])
In [156]:
mesh_c
Out[156]:
array([0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0. , 0.1, 0.2,
       0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0. , 0.1, 0.2, 0.3, 0.4, 0.5,
       0.6, 0.7, 0.8, 0.9, 0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8,
       0.9, 0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0. , 0.1,
       0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0. , 0.1, 0.2, 0.3, 0.4,
       0.5, 0.6, 0.7, 0.8, 0.9, 0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7,
       0.8, 0.9, 0. , 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0. ,
       0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
In [157]:
mesh_x
Out[157]:
array([[ 0. ,  1. ],
       [ 0.1,  1. ],
       [ 0.2,  1. ],
       [ 0.3,  1. ],
       [ 0.4,  1. ],
       [ 0.5,  1. ],
       [ 0.6,  1. ],
       [ 0.7,  1. ],
       [ 0.8,  1. ],
       [ 0.9,  1. ],
       [ 0. ,  2. ],
       [ 0.1,  2. ],
       [ 0.2,  2. ],
       [ 0.3,  2. ],
       [ 0.4,  2. ],
       [ 0.5,  2. ],
       [ 0.6,  2. ],
       [ 0.7,  2. ],
       [ 0.8,  2. ],
       [ 0.9,  2. ],
       [ 0. ,  3. ],
       [ 0.1,  3. ],
       [ 0.2,  3. ],
       [ 0.3,  3. ],
       [ 0.4,  3. ],
       [ 0.5,  3. ],
       [ 0.6,  3. ],
       [ 0.7,  3. ],
       [ 0.8,  3. ],
       [ 0.9,  3. ],
       [ 0. ,  4. ],
       [ 0.1,  4. ],
       [ 0.2,  4. ],
       [ 0.3,  4. ],
       [ 0.4,  4. ],
       [ 0.5,  4. ],
       [ 0.6,  4. ],
       [ 0.7,  4. ],
       [ 0.8,  4. ],
       [ 0.9,  4. ],
       [ 0. ,  5. ],
       [ 0.1,  5. ],
       [ 0.2,  5. ],
       [ 0.3,  5. ],
       [ 0.4,  5. ],
       [ 0.5,  5. ],
       [ 0.6,  5. ],
       [ 0.7,  5. ],
       [ 0.8,  5. ],
       [ 0.9,  5. ],
       [ 0. ,  6. ],
       [ 0.1,  6. ],
       [ 0.2,  6. ],
       [ 0.3,  6. ],
       [ 0.4,  6. ],
       [ 0.5,  6. ],
       [ 0.6,  6. ],
       [ 0.7,  6. ],
       [ 0.8,  6. ],
       [ 0.9,  6. ],
       [ 0. ,  7. ],
       [ 0.1,  7. ],
       [ 0.2,  7. ],
       [ 0.3,  7. ],
       [ 0.4,  7. ],
       [ 0.5,  7. ],
       [ 0.6,  7. ],
       [ 0.7,  7. ],
       [ 0.8,  7. ],
       [ 0.9,  7. ],
       [ 0. ,  8. ],
       [ 0.1,  8. ],
       [ 0.2,  8. ],
       [ 0.3,  8. ],
       [ 0.4,  8. ],
       [ 0.5,  8. ],
       [ 0.6,  8. ],
       [ 0.7,  8. ],
       [ 0.8,  8. ],
       [ 0.9,  8. ],
       [ 0. ,  9. ],
       [ 0.1,  9. ],
       [ 0.2,  9. ],
       [ 0.3,  9. ],
       [ 0.4,  9. ],
       [ 0.5,  9. ],
       [ 0.6,  9. ],
       [ 0.7,  9. ],
       [ 0.8,  9. ],
       [ 0.9,  9. ],
       [ 0. , 10. ],
       [ 0.1, 10. ],
       [ 0.2, 10. ],
       [ 0.3, 10. ],
       [ 0.4, 10. ],
       [ 0.5, 10. ],
       [ 0.6, 10. ],
       [ 0.7, 10. ],
       [ 0.8, 10. ],
       [ 0.9, 10. ]])
In [158]:
mesh_y
Out[158]:
array([[-20.85365121, -20.88014253, -20.90663386, -20.93312518,
        -20.95961651, -20.98610783, -21.01259916, -21.03909048,
        -21.06558181, -21.09207313],
       [-12.46258296, -12.48907428, -12.51556561, -12.54205693,
        -12.56854826, -12.59503958, -12.62153091, -12.64802223,
        -12.67451356, -12.70100488],
       [ -4.07151471,  -4.09800604,  -4.12449736,  -4.15098869,
         -4.17748001,  -4.20397134,  -4.23046266,  -4.25695399,
         -4.28344531,  -4.30993664],
       [  4.31955353,   4.29306221,   4.26657088,   4.24007956,
          4.21358823,   4.18709691,   4.16060558,   4.13411426,
          4.10762293,   4.08113161],
       [ 12.71062178,  12.68413046,  12.65763913,  12.6311478 ,
         12.60465648,  12.57816515,  12.55167383,  12.5251825 ,
         12.49869118,  12.47219985],
       [ 21.10169003,  21.0751987 ,  21.04870738,  21.02221605,
         20.99572473,  20.9692334 ,  20.94274208,  20.91625075,
         20.88975943,  20.8632681 ],
       [ 29.49275827,  29.46626695,  29.43977562,  29.4132843 ,
         29.38679297,  29.36030165,  29.33381032,  29.307319  ,
         29.28082767,  29.25433635],
       [ 37.88382652,  37.85733519,  37.83084387,  37.80435254,
         37.77786122,  37.75136989,  37.72487857,  37.69838724,
         37.67189592,  37.64540459],
       [ 46.27489477,  46.24840344,  46.22191212,  46.19542079,
         46.16892947,  46.14243814,  46.11594682,  46.08945549,
         46.06296417,  46.03647284],
       [ 54.66596301,  54.63947169,  54.61298036,  54.58648904,
         54.55999771,  54.53350639,  54.50701506,  54.48052374,
         54.45403241,  54.42754109]])
In [159]:
# Convert the uploaded notebook to HTML with nbconvert and download the result
from google.colab import files
import re

uploaded = files.upload()

for fn in uploaded.keys():
  fn_s = re.escape(fn)                       # escape the filename for the shell commands
  output_fn = fn.split('.', 1)[0] + '.html'  # e.g. notebook.ipynb -> notebook.html
  output_fn_s = re.escape(output_fn)
  !jupyter nbconvert --to html $fn_s
  files.download(output_fn)
  !rm $fn_s                                  # remove the uploaded copy afterwards
Upload widget is only available when the cell has been executed in the current browser session. Please rerun this cell to enable.