# Bayesian optimization of the Branin test function with MOE: GP
# hyperparameters are refit each iteration by multistart Newton on the
# log-likelihood, and each new sample point is chosen by multistart gradient
# descent on expected improvement.
import numpy

from moe.optimal_learning.python.cpp_wrappers.covariance import SquareExponential as cppSquareExponential
from moe.optimal_learning.python.cpp_wrappers.domain import TensorProductDomain as cppTensorProductDomain
from moe.optimal_learning.python.cpp_wrappers.gaussian_process import GaussianProcess as cppGaussianProcess
from moe.optimal_learning.python.cpp_wrappers.optimization import GradientDescentParameters as cppGradientDescentParameters
from moe.optimal_learning.python.data_containers import HistoricalData
from moe.optimal_learning.python.geometry_utils import ClosedInterval
from moe.optimal_learning.python.python_version.covariance import SquareExponential as pythonSquareExponential
from moe.optimal_learning.python.python_version.domain import TensorProductDomain as pythonTensorProductDomain
from moe.optimal_learning.python.python_version.gaussian_process import GaussianProcess as pythonGaussianProcess

from moe.optimal_learning.python.cpp_wrappers.expected_improvement import ExpectedImprovement as cppExpectedImprovement
from moe.optimal_learning.python.cpp_wrappers.expected_improvement import multistart_expected_improvement_optimization
from moe.optimal_learning.python.cpp_wrappers.optimization import GradientDescentOptimizer as cppGradientDescentOptimizer

from moe.optimal_learning.python.cpp_wrappers.log_likelihood import GaussianProcessLogLikelihood as cppGaussianProcessLogLikelihood
from moe.optimal_learning.python.cpp_wrappers.log_likelihood import multistart_hyperparameter_optimization
from moe.optimal_learning.python.cpp_wrappers.optimization import NewtonOptimizer as cppNewtonOptimizer
from moe.optimal_learning.python.cpp_wrappers.optimization import NewtonParameters as cppNewtonParameters

# Gradient-descent parameters shared by every multistart EI optimization in
# the main loop below.
sgd_params = cppGradientDescentParameters(num_multistarts=100, max_num_steps=50, max_num_restarts=2,
                                          num_steps_averaged=15, gamma=0.7, pre_mult=1.0,
                                          max_relative_change=0.7, tolerance=1.0e-3)


class Branin(object):
    def __init__(self):
        self._dim = 2
        # NOTE: this search box is wider than the usual Branin evaluation
        # square of x_1 in [-5, 10], x_2 in [0, 15] (see the docstring below).
        self._search_domain = numpy.repeat([[-15., 15.]], 2, axis=0)
        # Bounds for the three square-exponential hyperparameters: the signal
        # variance and one length scale per input dimension.
        self._hyper_domain = numpy.array([[10., 100.], [0.1, 15.], [0.1, 15.]])
        self._num_init_pts = 15
        self._sample_var = 0.01

    def evaluate(self, x):
        """Evaluate the Branin function at ``x``.

        The function is usually evaluated on the square x_1 \in [-5, 10],
        x_2 \in [0, 15]. The global minimum f(x*) = 0.397887 is attained at
        x* = [-pi, 12.275], [pi, 2.275], and [9.42478, 2.475].

        :param x: 2-dim numpy array
        """
        a = 1
        b = 5.1 / (4 * pow(numpy.pi, 2.0))
        c = 5 / numpy.pi
        r = 6
        s = 10
        t = 1 / (8 * numpy.pi)
        return a * pow(x[1] - b * pow(x[0], 2.0) + c * x[0] - r, 2.0) + s * (1 - t) * numpy.cos(x[0]) + s

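
# Sanity-check sketch (an addition, not in the original paste): with the
# formula above, the first optimum quoted in the docstring should evaluate to
# roughly 0.397887.
def _check_branin_minimum():
    value = Branin().evaluate(numpy.array([-numpy.pi, 12.275]))
    assert abs(value - 0.397887) < 1.0e-4, value
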

def hyper_opt_newton(cpp_cov, data, hyper_domain):
    """Return hyperparameters maximizing the GP log-likelihood on ``data``, via multistart Newton."""
    cpp_search_domain = cppTensorProductDomain([ClosedInterval(bound[0], bound[1]) for bound in hyper_domain])
    newton_opt_param = cppNewtonParameters(num_multistarts=100, max_num_steps=100, gamma=1.01,
                                           time_factor=1.0e-3, max_relative_change=1.0, tolerance=1.0e-10)
    cpp_gp_loglikelihood = cppGaussianProcessLogLikelihood(cpp_cov, data)
    newton_optimizer = cppNewtonOptimizer(cpp_search_domain, cpp_gp_loglikelihood, newton_opt_param)
    return multistart_hyperparameter_optimization(newton_optimizer, -1, max_num_threads=16)


if __name__ == "__main__":
    objective_func = Branin()
    python_search_domain = pythonTensorProductDomain([ClosedInterval(bound[0], bound[1]) for bound in objective_func._search_domain])
    cpp_search_domain = cppTensorProductDomain([ClosedInterval(bound[0], bound[1]) for bound in objective_func._search_domain])

    # Seed the model with uniform-random initial points and their (noisy)
    # objective values.
    initial_points = python_search_domain.generate_uniform_random_points_in_domain(objective_func._num_init_pts)
    hist_data = HistoricalData(objective_func._dim)
    hist_data.append_historical_data(initial_points,
                                     [objective_func.evaluate(pt) for pt in initial_points],
                                     objective_func._sample_var * numpy.ones(len(initial_points)))

    # Square-exponential covariance: dim + 1 hyperparameters, initialized to ones.
    python_cov = pythonSquareExponential(numpy.ones(objective_func._dim + 1))
    cpp_cov = cppSquareExponential(numpy.ones(objective_func._dim + 1))

    for i in range(50):
        # Refit the hyperparameters on everything sampled so far and rebuild
        # the GP models (python_gp is constructed only for parity with the
        # C++ model and is otherwise unused here).
        hyper = hyper_opt_newton(cpp_cov, hist_data, objective_func._hyper_domain)
        python_cov.set_hyperparameters(hyper)
        cpp_cov.set_hyperparameters(hyper)
        python_gp = pythonGaussianProcess(python_cov, hist_data)
        cpp_gp = cppGaussianProcess(cpp_cov, hist_data)

        # Choose the next sample by multistart gradient descent on expected improvement.
        cpp_ei_evaluator = cppExpectedImprovement(gaussian_process=cpp_gp, num_mc_iterations=int(1e6))
        optimizer = cppGradientDescentOptimizer(cpp_search_domain, cpp_ei_evaluator, sgd_params, int(1000))
        next_points = multistart_expected_improvement_optimization(optimizer, None, 1, use_gpu=False, which_gpu=0, max_num_threads=1)

        # Report progress, then evaluate the objective at the chosen point and
        # fold the noisy observation back into the historical data.
        cpp_ei_evaluator.set_current_point(next_points)
        print "iteration {0}, best value: {1}, voi: {2}".format(i, numpy.amin(hist_data.points_sampled_value), cpp_ei_evaluator.compute_expected_improvement())
        hist_data.append_historical_data(next_points, [objective_func.evaluate(next_points[0])], [objective_func._sample_var])