Advertisement
Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
import random

import matplotlib.pyplot as plt
from sklearn.datasets import make_regression  # synthetic regression dataset

# Build a 1-D regression problem: 1000 samples, one feature, Gaussian noise.
X, Y = make_regression(n_features=1, noise=10, n_samples=1000)

# Scatter-plot the raw training data before fitting.
plt.scatter(X, Y, s=5, label="training")
plt.xlabel('Feature - X')
plt.ylabel('Target - Y')

# Model parameters for the line y = m*x + b:
# const[0] holds m (slope), const[1] holds b (intercept),
# both initialised to random values in [0, 1).
const = [random.random(), random.random()]
def hypothesis(x, m, c):
    """Predict with the linear model: return m * x + c."""
    prediction = x * m
    prediction += c
    return prediction
def cost(m, c):
    """Total squared error of the line y = m*x + c over the global X/Y set.

    Note: this is the *sum* of squared errors, not the mean, so its
    magnitude grows with the number of samples.

    Improvement over the original: the manual index-based ``while`` loop
    is replaced with the idiomatic ``sum`` over ``zip(X, Y)`` — same
    result, no hand-rolled counter.
    """
    return sum((hypothesis(x, m, c) - y) ** 2 for x, y in zip(X, Y))
def SGD(x, y, learning_rate=0.001):
    """One stochastic-gradient step on the sample (x, y), updating ``const`` in place.

    Per-sample cost is C = (m*x + c - y)**2; by the chain rule
    dC/dm = 2*(m*x + c - y)*x and dC/dc = 2*(m*x + c - y).
    """
    # Prediction error for this sample, computed once before any update.
    residual = hypothesis(x, const[0], const[1]) - y
    grad_m = 2 * residual * x  # dC/dm
    grad_c = 2 * residual      # dC/dc
    const[0] -= learning_rate * grad_m
    const[1] -= learning_rate * grad_c
def epoch(j):
    """Run one full SGD pass over the training set, then report the cost.

    Parameters:
        j: the 1-based epoch number (the training loop already passes i+1).

    Fix: the original printed ``j + 1`` even though the caller passes a
    1-based epoch number, so epochs 1..100 were reported as 2..101. Print
    ``j`` directly. The manual index ``while`` loop is also replaced with
    iteration over ``zip(X, Y)``.
    """
    for x, y in zip(X, Y):
        SGD(x, y)
    print("Epoch number " + str(j) + " cost equals " + str(cost(const[0], const[1])))
# Cost with the randomly initialised parameters, before any training.
print("Initial Cost = " + str(cost(const[0], const[1])))

# Train for 100 epochs; epoch numbers passed to epoch() are 1-based.
for e in range(100):
    epoch(e + 1)

# Predict with the learned line and overlay it on the training scatter.
predictions = [hypothesis(x, const[0], const[1]) for x in X]
plt.scatter(X, predictions, s=5, label="prediction")
plt.show()

# Final learned slope and intercept.
print(const[0], const[1])
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement