Skip to content

Instantly share code, notes, and snippets.

@superdzen
Forked from guangningyu/keras-regression.py
Last active May 10, 2019 07:58
Show Gist options
  • Select an option

  • Save superdzen/9b68b98f4a2142a28ea83dd33007a93c to your computer and use it in GitHub Desktop.

Select an option

Save superdzen/9b68b98f4a2142a28ea83dd33007a93c to your computer and use it in GitHub Desktop.
Reference: [Regression Tutorial with the Keras Deep Learning Library in Python](http://machinelearningmastery.com/regression-tutorial-keras-deep-learning-library-python/)
#!/usr/bin/env python
from urllib.request import urlopen
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
def load_data(data_url='https://archive.ics.uci.edu/ml/machine-learning-databases/housing/housing.data'):
    """Download the Boston housing dataset and split it into features and target.

    Each whitespace-separated row has 14 numeric columns: the first 13 are
    features, the last is the median house value.

    Args:
        data_url: Location of the housing data file (defaults to the UCI
            repository copy the original tutorial used).

    Returns:
        (X, Y): X is a list of 13-element float lists, Y a list of floats.
    """
    X = []
    Y = []
    # Context manager closes the HTTP response even if parsing raises,
    # fixing the socket leak in the original version.
    with urlopen(data_url) as response:
        for line in response.readlines():
            # bytes split/float-convert fine in Python 3, no decode needed
            values = list(map(float, line.split()))
            X.append(values[0:13])
            Y.append(values[13])
    return X, Y
def basic_model():
    """Build the baseline network: one 13-unit ReLU hidden layer, linear output.

    Returns:
        A compiled Keras Sequential model (Adam optimizer, MSE loss).
    """
    net = Sequential([
        Dense(13, input_dim=13, kernel_initializer='normal', activation='relu'),
        Dense(1, kernel_initializer='normal'),
    ])
    net.compile(loss='mean_squared_error', optimizer='adam')
    return net
def deeper_model():
    """Build a deeper variant: two ReLU hidden layers (13 then 6 units).

    Returns:
        A compiled Keras Sequential model (Adam optimizer, MSE loss).
    """
    net = Sequential([
        Dense(13, input_dim=13, kernel_initializer='normal', activation='relu'),
        Dense(6, kernel_initializer='normal', activation='relu'),
        Dense(1, kernel_initializer='normal'),
    ])
    net.compile(loss='mean_squared_error', optimizer='adam')
    return net
def wider_model():
    """Build a wider variant: a single 20-unit ReLU hidden layer.

    Returns:
        A compiled Keras Sequential model (Adam optimizer, MSE loss).
    """
    net = Sequential([
        Dense(20, input_dim=13, kernel_initializer='normal', activation='relu'),
        Dense(1, kernel_initializer='normal'),
    ])
    net.compile(loss='mean_squared_error', optimizer='adam')
    return net
def train(X, Y, fn, standardize=True, seed=7):
    """Cross-validate a Keras regressor on (X, Y) and print mean/std MSE.

    Args:
        X: Feature rows (list of 13-element float lists).
        Y: Target values (list of floats).
        fn: Zero-argument callable returning a compiled Keras model.
        standardize: Prepend a StandardScaler step to the pipeline.
        seed: Seed for NumPy and the cross-validation shuffle.
    """
    np.random.seed(seed)
    steps = []
    if standardize:
        steps.append(('standardize', StandardScaler()))
    steps.append(('mlp', KerasRegressor(build_fn=fn, epochs=50, batch_size=5, verbose=0)))
    pipeline = Pipeline(steps)
    # shuffle=True is required for random_state to take effect; recent
    # scikit-learn raises ValueError for random_state without shuffle.
    kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
    # Make the metric explicit: cross_val_score maximizes, so MSE comes back
    # negated — flip the sign so the printed value really is MSE, as labeled.
    results = cross_val_score(pipeline, X, Y, cv=kfold,
                              scoring='neg_mean_squared_error')
    print('Result: %.2f (%.2f) MSE' % (-results.mean(), results.std()))
if __name__ == '__main__':
    X, Y = load_data()
    # Compare the raw baseline against standardized input and the two
    # alternative topologies, all with the same seed.
    experiments = [
        (basic_model, False),
        (basic_model, True),
        (deeper_model, True),
        (wider_model, True),
    ]
    for build_fn, scale in experiments:
        train(X, Y, fn=build_fn, standardize=scale, seed=7)
@superdzen
Copy link
Author

Python 3.6.2 (Anaconda 5.0.0)
Keras 2.0.6
NumPy 1.13.1
TensorFlow 1.3.0
PyCharm Community Edition 2017.2.3
Build #PC-172.3968.37, built on September 1, 2017
JRE: 1.8.0_152-release-915-b11 amd64
JVM: OpenJDK 64-Bit Server VM by JetBrains s.r.o
Windows 10 10.0

@superdzen
Copy link
Author

Result: 36.93 (21.52) MSE
Result: 29.45 (27.62) MSE
Result: 23.21 (27.01) MSE
Result: 24.54 (25.06) MSE

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment