Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import sys
- import numpy as np
- import matplotlib.pyplot as plt
- from matplotlib import cm
- from mpl_toolkits.mplot3d import Axes3D
# Parameters
# CLI: mode 'r' runs gradient-descent regression (needs a learning rate),
# mode 'j' renders the J(theta) cost surface.
args = sys.argv
argerr = "Please run with arguments: (r alpha | j)"
# Single guard: too few arguments, or an unknown mode, both abort with usage.
if len(args) < 2 or args[1] not in ('r', 'j'):
    print(argerr)
    sys.exit()
mode = args[1] # r | j
# Training data
fileX = 'ex2x.dat'
fileY = 'ex2y.dat'

def loadNums(f, delim):
    """Parse every line of the open file *f* into a tuple of floats.

    Each line is stripped and split on *delim*; returns one tuple per
    line, in file order.
    """
    return [tuple(float(x) for x in line.strip().split(delim))
            for line in f.read().splitlines()]

# Use context managers so both data files are closed deterministically
# (the original opened them and leaked the handles for the process lifetime).
with open(fileX, 'r') as xsFile:
    # Prepend the constant 1 (intercept term) to every feature row.
    xs = [np.array((1,) + row) for row in loadNums(xsFile, " ")]
with open(fileY, 'r') as ysFile:
    # Each target line holds a single value; keep just that scalar.
    ys = [row[0] for row in loadNums(ysFile, " ")]

m = len(xs)                           # number of training samples
theta = np.array([ 0 for x in xs[0] ])  # parameter vector, zero-initialised
- # Helpers
def ht(t, x):
    """Evaluate the linear hypothesis for parameters *t* at input *x*: t · x."""
    prediction = np.dot(t, x)
    return prediction
def h(x):
    """Hypothesis value at *x* under the current global parameter vector theta."""
    return ht(theta, x)
def err(t, x, y):
    """Signed residual: prediction of hypothesis *t* at *x*, minus target *y*."""
    prediction = ht(t, x)
    return prediction - y
def J(t):
    """Least-squares cost of parameters *t* over the training set: (1/2m) * SSE."""
    residuals = (err(t, x, y) for x, y in zip(xs, ys))
    return (1.0/(2*m)) * sum(r ** 2 for r in residuals)
def delta():
    """Unscaled batch gradient: sum over samples of (h(x) - y) * x."""
    total = sum((h(x) - y) * x for x, y in zip(xs, ys))
    return total
def gradDescStep():
    """Compute one gradient-descent update of theta (rate is the global *a*).

    Returns the new parameter vector; the caller is responsible for
    assigning it back to the global theta.
    """
    step = (a / m) * delta()
    return theta - step
def infer(t):
    """Predictions of hypothesis *t* for every training input in xs."""
    return list(map(lambda x: ht(t, x), xs))
- # Modes
- def regression(params):
- global theta
- t0 = theta
- t1 = gradDescStep()
- for i in range(100):
- theta = gradDescStep()
- t100 = theta
- for i in range(1400):
- theta = gradDescStep()
- t1500 = theta
- print("1st iteration: ", t1) # [ 0.07452802 0.38002167]
- print("1500th iteration: ", t1500) # [ 0.75015039, 0.06388338]
- xAxis = [x[1] for x in xs]
- plt.suptitle("Linear Regression Demo")
- plt.plot(xAxis, ys, "bo", label="data")
- plt.plot(xAxis, infer(t0), "y", label="initial")
- plt.plot(xAxis, infer(t1), "r", label="1 iteration")
- plt.plot(xAxis, infer(t100), "b", label="100 iterations")
- plt.plot(xAxis, infer(t1500), "g", label="1500 iterations")
- plt.axis([1, 9, -.1, 2])
- plt.legend()
def Jplot():
    """Render a 3-D surface of the cost J over a grid of (theta0, theta1).

    Bug fix: with np.meshgrid's default 'xy' indexing, X[j, i] is
    theta0_vals[i] and Y[j, i] is theta1_vals[j], so Z must be indexed
    [j][i] (theta1-major).  The original filled Z as [i][j]
    (theta0-major), silently transposing the surface — it only avoided a
    shape error because both axes happen to have 50 samples.
    """
    theta0_vals = np.arange(-2, 3, (5 / 50.0))
    theta1_vals = np.arange(-1, 1, (2 / 50.0))
    # Build Z directly in meshgrid orientation: rows follow theta1,
    # columns follow theta0.
    J_vals = np.array([[J([t0, t1]) for t0 in theta0_vals]
                       for t1 in theta1_vals])
    X, Y = np.meshgrid(theta0_vals, theta1_vals)
    fig = plt.figure()
    fig.suptitle("J(theta) plot")
    ax = fig.add_subplot(111, projection='3d')
    ax.plot_surface(X, Y, J_vals, color='r', cmap=cm.coolwarm)
# Dispatch on the selected mode, then display whichever figure was built.
if mode == 'r':
    if len(args) >= 3:
        a = float(args[2]) # learning rate, e.g. 0.07; read globally by gradDescStep
        regression(a)
    else:
        # 'r' mode requires the alpha argument.
        print(argerr)
        sys.exit()
else:
    Jplot()
plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement