- """
- Given the starting point of any `x` gradient descent
- should be able to find the minimum value of x for the
- cost function `f` defined below.
- """
- import random
- def f(x):
- """
- Quadratic function.
- It's easy to see the minimum value of the function
- is 5 when is x=0.
- """
- return x**2 + 5
- def df(x):
- """
- Derivative of `f` with respect to `x`.
- """
- return 2*x
- def gradient_descent_update(x, gradx, learning_rate):
- """
- Performs a gradient descent update.
- """
- # TODO: Implement gradient descent.
- # x = x - learning_rate * gradient_of_x
- x = x - (gradx * learning_rate)
- # Return the new value for x
- return x
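# A worked single step (illustrative numbers, not part of the original
# exercise): starting from x = 4 with learning_rate = 0.1,
#   gradx = df(4) = 8
#   x_new = 4 - 0.1 * 8 = 3.2
# Since df(x) = 2x, every update scales x by (1 - 2 * learning_rate),
# so any learning rate in (0, 1) moves x geometrically toward 0.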
# Random starting point between 0 and 10,000. Feel free to set x to whatever you like.
x = random.randint(0, 10000)
# Learning rate: 0.1 shrinks x by 20% per epoch (see the note above).
learning_rate = 0.1
epochs = 100

for i in range(epochs + 1):
    cost = f(x)
    gradx = df(x)
    # Print the current value of x (the original printed gradx by mistake).
    print("EPOCH {}: Cost = {:.3f}, x = {:.3f}".format(i, cost, x))
    x = gradient_descent_update(x, gradx, learning_rate)
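# A quick convergence check (a minimal sketch added here, not part of the
# original paste; `run_descent` is a hypothetical helper). Because each
# update multiplies x by (1 - 2 * learning_rate) = 0.8, 100 epochs shrink
# even x = 10,000 to roughly 10000 * 0.8**100, about 2e-6, so the cost
# ends up essentially at the minimum of 5.

def run_descent(x0, learning_rate=0.1, epochs=100):
    """Run the same update loop as above and return the final x."""
    x = x0
    for _ in range(epochs):
        x = gradient_descent_update(x, df(x), learning_rate)
    return x

final_x = run_descent(10000.0)
assert abs(final_x) < 1e-4           # x has converged to ~0
assert abs(f(final_x) - 5) < 1e-6    # cost has converged to the minimum, 5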