Deep Learning: Keras + Boston_housing

Predicting house prices: a regression example
# -*- coding: utf-8 -*-

"""
@Date: 2018/10/7

@Author: dreamhomes

@Summary: Predicting Boston house prices with a small Keras regression network.
"""

from keras.datasets import boston_housing
from keras import models
from keras import layers

import numpy as np
import matplotlib.pyplot as plt

(train_data, train_targets), (test_data, test_targets) = boston_housing.load_data()
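# train_data has shape (404, 13) and test_data (102, 13); the targets are
# median home prices in thousands of dollars.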

# print(train_data[0])
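# Feature-wise normalization: subtract the column-wise mean and divide by the
# standard deviation, both computed on the training data only. The same
# training statistics are then applied to the test data (never statistics
# computed on the test set itself).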
mean = train_data.mean(axis=0)
std = train_data.std(axis=0)
train_data -= mean
train_data /= std

test_data -= mean
test_data /= std


def build_model():
    """
    Build and compile the regression network. Defined as a function so the
    same model can be instantiated afresh for each cross-validation fold.
    :return: a compiled Keras model
    """
    model = models.Sequential()
    model.add(layers.Dense(64, activation='relu', input_shape=(13,)))
    model.add(layers.Dense(64, activation='relu'))
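    # A single unit with no activation: a linear output layer is the standard
    # setup for scalar regression.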
    model.add(layers.Dense(1))
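    # MSE is a common loss for regression; MAE is tracked as a more
    # interpretable metric (here, in thousands of dollars).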
    model.compile(optimizer='rmsprop', loss='mse', metrics=['mae'])
    return model


# # K-fold cross-validation
#
# k = 4
# num_val_samples = len(train_data) // k
# num_epochs = 500
# all_scores = []
#
# # Saving the validation logs at each fold
# all_mae_histories = []
#
# for i in range(k):
#     print('processing fold #', i)
#     val_data = train_data[i * num_val_samples:(i + 1) * num_val_samples]
#     val_targets = train_targets[i * num_val_samples:(i + 1) * num_val_samples]
#
#     partial_train_data = np.concatenate(
#         [train_data[:i * num_val_samples], train_data[(i + 1) * num_val_samples:]], axis=0)
#     partial_train_targets = np.concatenate(
#         [train_targets[:i * num_val_samples], train_targets[(i + 1) * num_val_samples:]], axis=0)
#
#     model = build_model()
#     history = model.fit(
#         partial_train_data,
#         partial_train_targets,
#         validation_data=(val_data, val_targets),
#         epochs=num_epochs,
#         batch_size=1,
#         verbose=0)
#     # val_mse, val_mae = model.evaluate(val_data, val_targets, verbose=0)
#     # all_scores.append(val_mae)
#     # print(all_scores)
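#     # Note: with tf.keras / Keras >= 2.3 the history key is 'val_mae'
#     # instead of 'val_mean_absolute_error'.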
#     mae_history = history.history['val_mean_absolute_error']
#     all_mae_histories.append(mae_history)
#
# average_mae_history = [
#     np.mean([x[i] for x in all_mae_histories]) for i in range(num_epochs)]
#
# plt.plot(range(1, len(average_mae_history) + 1), average_mae_history)
# plt.xlabel('Epochs')
# plt.ylabel('Validation MAE')
# plt.show()
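#
# # Not part of the original script: the raw epoch-by-epoch MAE curve is noisy,
# # so it is often smoothed with an exponential moving average before plotting.
# # A minimal sketch, assuming average_mae_history from the loop above:
#
# def smooth_curve(points, factor=0.9):
#     """Exponential moving average of a list of values."""
#     smoothed = []
#     for point in points:
#         if smoothed:
#             smoothed.append(smoothed[-1] * factor + point * (1 - factor))
#         else:
#             smoothed.append(point)
#     return smoothed
#
# smooth_mae_history = smooth_curve(average_mae_history[10:])  # skip the first 10 noisy epochs
# plt.plot(range(1, len(smooth_mae_history) + 1), smooth_mae_history)
# plt.xlabel('Epochs')
# plt.ylabel('Smoothed validation MAE')
# plt.show()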

# Training the final model
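# The hyperparameters (80 epochs, batch size 16) are presumably chosen from the
# K-fold validation curve above, where the MAE stops improving.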
model = build_model()
model.fit(train_data, train_targets, epochs=80, batch_size=16, verbose=1)
test_mse_score, test_mae_score = model.evaluate(test_data, test_targets)

print(test_mae_score)
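
# Not part of the original script: a minimal sketch of using the trained model
# for inference. model.predict returns an array of shape (num_samples, 1)
# containing the predicted prices in thousands of dollars.
predictions = model.predict(test_data)
print(predictions[0])  # predicted price (in $1000s) for the first test sample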
