#! /usr/bin/env python3
#
def regression_1d ( ):

#*****************************************************************************80
#
## regression_1d uses keras to solve a simple 1d regression case.
#
#  Licensing:
#
#    This code is distributed under the MIT license.
#
#  Modified:
#
#    25 April 2019
#
#  Author:
#
#    John Burkardt.
#
#  Reference:
#
#    Francois Chollet,
#    Deep Learning with Python,
#    Manning, 2018,
#    ISBN: 9781617294433.
#
  import keras
  import matplotlib.pyplot as plt
  import numpy as np
  import platform

  print ( '' )
  print ( 'regression_1d:' )
  print ( '  python version: %s' % ( platform.python_version ( ) ) )
  print ( '  keras version: %s' % ( keras.__version__ ) )
  print ( '  Neural network to solve a 1d regression problem.' )
  print ( '  The data is read from external files.' )
#
#  Load the datasets from external files.
#  Column 0 is the independent variable, column 1 is the target.
#
  train = np.loadtxt ( 'regression_1d_train.txt' )
  print ( train.shape )
  train_data = train[:,0]
  train_targets = train[:,1]
  train_num = len ( train_data )

  test = np.loadtxt ( 'regression_1d_test.txt' )
  print ( test.shape )
  test_data = test[:,0]
  test_targets = test[:,1]
  test_num = len ( test_data )
#
#  Exhibit the first and last few items of the training data.
#
  print ( train_data[0:3], train_data[-3:] )
  print ( train_targets[0:3], train_targets[-3:] )
#
#  Normalize the data (not used here).
#
# mean = train_data.mean ( axis = 0 )
# train_data = train_data - mean
# std = train_data.std ( axis = 0 )
# train_data = train_data / std
# test_data = test_data - mean
# test_data = test_data / std
#
#  Define the model: a single dense unit computes y = W * x + b.
#
  from keras import models
  from keras import layers

  model = models.Sequential ( )
  model.add ( layers.Dense ( units = 1, input_dim = 1 ) )
  model.compile ( optimizer = 'sgd', loss = 'mse' )
#
#  Train the model, printing the cost every 10 steps.
#
  print ( '' )
  print ( 'Training:' )
  print ( '' )

  for step in range ( 101 ):
    cost = model.train_on_batch ( train_data, train_targets )
    if ( step % 10 == 0 ):
      print ( 'step %d: training cost = %g' % ( step, cost ) )
#
#  Test.
#
  print ( '' )
  print ( 'Testing:' )
  print ( '' )

  cost = model.evaluate ( test_data, test_targets, batch_size = test_num )
  print ( 'Test cost:', cost )
  W, b = model.layers[0].get_weights ( )
  print ( 'Weights = ', W )
  print ( 'Bias = ', b )
#
#  Plot the data and the fitted line.
#
  xmin = -1.0
  xmax = 2.0
  plt.plot ( [ xmin, xmax ], [ b[0] + W[0,0] * xmin, b[0] + W[0,0] * xmax ], 'r-' )
  plt.plot ( train_data, train_targets, 'bo' )
  plt.plot ( test_data, test_targets, 'ro' )
  plt.grid ( True )
  plt.xlabel ( '<--- X --->', fontsize = 16 )
  plt.ylabel ( '<--- Y --->', fontsize = 16 )
  plt.title ( 'Data: train (blue), test (red)', fontsize = 16 )
  filename = 'regression_1d.png'
  plt.savefig ( filename )
  print ( '' )
  print ( '  Graphics saved as "%s"' % ( filename ) )
  plt.show ( )
#
#  Terminate.
#
  print ( '' )
  print ( 'regression_1d:' )
  print ( '  Normal end of execution.' )

  return

def timestamp ( ):

#*****************************************************************************80
#
## TIMESTAMP prints the date as a timestamp.
#
#  Licensing:
#
#    This code is distributed under the MIT license.
#
#  Modified:
#
#    06 April 2013
#
#  Author:
#
#    John Burkardt
#
#  Parameters:
#
#    None
#
  import time

  t = time.time ( )
  print ( time.ctime ( t ) )

  return None

if ( __name__ == '__main__' ):
  timestamp ( )
  regression_1d ( )
  timestamp ( )