-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathL10 Boston House Prices Regression Problem.py
More file actions
97 lines (86 loc) · 3.82 KB
/
L10 Boston House Prices Regression Problem.py
File metadata and controls
97 lines (86 loc) · 3.82 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
## Regression problems are those whose outputs are a range of values
#import libraries
import numpy
import pandas

from keras.layers import Dense
from keras.models import Sequential
from keras.wrappers.scikit_learn import KerasRegressor

# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# cross_val_score and KFold now live in sklearn.model_selection.
from sklearn.model_selection import KFold, cross_val_score
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
# Load the Boston housing dataset.
# The file is whitespace separated (fixed-width style), not comma separated.
# `delim_whitespace=True` is deprecated since pandas 2.1; `sep=r"\s+"` is the
# supported equivalent and behaves identically here.
dataframe = pandas.read_csv("housing.csv", sep=r"\s+", header=None)
dataset = dataframe.values
# Split into input features X (13 columns) and target Y (median house value,
# column 13).
X = dataset[:, 0:13]
Y = dataset[:, 13]
#base model
def baseline_model():
    """Build and compile a 13-13-1 MLP for the Boston housing regression task.

    Returns:
        A compiled Keras ``Sequential`` model: one hidden ReLU layer the same
        width as the 13 input features, MSE loss, Adam optimizer.
    """
    model = Sequential()
    # 13 inputs -> 13 hidden units -> 1 output.
    # The `init=` keyword was removed in Keras 2; it is now `kernel_initializer`.
    model.add(Dense(13, input_dim=13, kernel_initializer='normal', activation='relu'))
    # No activation on the output layer: for regression we want the raw
    # numerical prediction, not a squashed/transformed value.
    model.add(Dense(1, kernel_initializer='normal'))
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model
def larger_model():
    """Build and compile a deeper 13-13-6-1 MLP (extra hidden layer).

    Returns:
        A compiled Keras ``Sequential`` model: two hidden ReLU layers
        (13 then 6 units), MSE loss, Adam optimizer.
    """
    model = Sequential()
    # 13 inputs -> 13 hidden -> 6 hidden -> 1 output.
    # The `init=` keyword was removed in Keras 2; it is now `kernel_initializer`.
    model.add(Dense(13, input_dim=13, kernel_initializer='normal', activation='relu'))
    model.add(Dense(6, kernel_initializer='normal', activation='relu'))
    # No activation on the output layer: for regression we want the raw
    # numerical prediction, not a squashed/transformed value.
    model.add(Dense(1, kernel_initializer='normal'))
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model
def wider_model():
    """Build and compile a wider 13-20-1 MLP (shallow, more hidden units).

    Returns:
        A compiled Keras ``Sequential`` model: a single 20-unit ReLU hidden
        layer, MSE loss, Adam optimizer.
    """
    model = Sequential()
    # 13 inputs -> 20 hidden units -> 1 output.
    # The `init=` keyword was removed in Keras 2; it is now `kernel_initializer`.
    model.add(Dense(20, input_dim=13, kernel_initializer='normal', activation='relu'))
    # No activation on the output layer: for regression we want the raw
    # numerical prediction, not a squashed/transformed value.
    model.add(Dense(1, kernel_initializer='normal'))
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model
# Fix the RNG seed so the cross-validation splits and the network's weight
# initialisation are repeatable across runs.
seed = 7
numpy.random.seed(seed)


def _evaluate(build_fn, label):
    """Score one model builder with standardised 10-fold cross-validation.

    Standardisation happens inside the Pipeline so the scaler is fit only on
    each training fold (no information leaks into the validation fold).

    Args:
        build_fn: zero-argument function returning a compiled Keras model.
        label: name printed with the mean/std of the CV scores.

    Returns:
        The numpy array of per-fold scores from ``cross_val_score``.
    """
    # Local import keeps this block self-contained; sklearn.cross_validation
    # was removed in scikit-learn 0.20 in favour of sklearn.model_selection.
    from sklearn.model_selection import KFold, cross_val_score

    steps = [
        ('standardize', StandardScaler()),
        # `nb_epoch` was renamed `epochs` in Keras 2.
        ('mlp', KerasRegressor(build_fn=build_fn, epochs=100, batch_size=5, verbose=0)),
    ]
    # KFold's old `KFold(n=len(X), n_folds=10)` signature is gone; it now takes
    # n_splits, and random_state only applies when shuffle=True.
    kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
    scores = cross_val_score(Pipeline(steps), X, Y, cv=kfold)
    # abs() because the scorer reports errors as negative values.
    print("%s: %.2f (%.2f) MSE" % (label, abs(scores.mean()), scores.std()))
    return scores


results = _evaluate(baseline_model, "Results")
resultsLarger = _evaluate(larger_model, "Larger")
resultsWider = _evaluate(wider_model, "Wider")
################################
# Results: 567.31 (277.51) MSE #
# Larger: 576.33 (288.83) MSE #
# Wider: 560.62 (272.04) MSE #
################################