Forward kinematics optimization

from numpy import *
from scipy.optimize import minimize

# Denavit-Hartenberg matrix, as given on the Wikipedia page "Denavit-Hartenberg parameters"
def DenHarMat(theta, alpha, a, d):
    cos_theta = cos(theta)
    sin_theta = sin(theta)
    cos_alpha = cos(alpha)
    sin_alpha = sin(alpha)

    return array([
        [cos_theta, -sin_theta*cos_alpha, sin_theta*sin_alpha, a*cos_theta],
        [sin_theta, cos_theta*cos_alpha, -cos_theta*sin_alpha, a*sin_theta],
        [0, sin_alpha, cos_alpha, d],
        [0, 0, 0, 1],
    ])

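# Sanity check (added note, not part of the original paste): with all four
# parameters set to zero the transform reduces to the 4x4 identity, so a quick
# self-test could be:
#   assert allclose(DenHarMat(0, 0, 0, 0), eye(4))
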
# forward model: three DH joints with linear distortions on input and output
def model_function(parameters, x):
    # split the 21-element parameter vector into seven groups of three:
    # input scale/offset, output scale/offset, and (alpha, a, d) for each joint
    scale_input, parameters = split(parameters, [3])
    translate_input, parameters = split(parameters, [3])

    scale_output, parameters = split(parameters, [3])
    translate_output, parameters = split(parameters, [3])

    p_T1, parameters = split(parameters, [3])
    p_T2, parameters = split(parameters, [3])
    p_T3, parameters = split(parameters, [3])

    # apply the linear input distortion to get the joint angles
    theta = x * scale_input + translate_input

    # build the Denavit-Hartenberg matrices
    T1 = DenHarMat(theta[0], p_T1[0], p_T1[1], p_T1[2])
    T2 = DenHarMat(theta[1], p_T2[0], p_T2[1], p_T2[2])
    T3 = DenHarMat(theta[2], p_T3[0], p_T3[1], p_T3[2])

    # chain the joint transformations:
    # y = T1 * T2 * T3 * [0 0 0 1]
    y = dot(T1, dot(T2, dot(T3, array([0, 0, 0, 1]))))

    # apply the linear output distortion
    return y[0:3] * scale_output + translate_output

# least-squares cost: mean over all samples of the squared residual norm
def cost_function(parameters, X, Y):
    residuals = array([model_function(parameters, X[i]) - Y[i] for i in range(X.shape[0])])
    return mean(sum(square(residuals), axis=1))
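
# Usage sketch (hypothetical values, not part of the original paste):
#   params_demo = zeros(7*3)
#   X_demo = array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]])
#   Y_demo = array([[1.0, 2.0, 3.0], [1.1, 2.1, 3.1]])
#   print(cost_function(params_demo, X_demo, Y_demo))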


# ========== main script start ===========

# load data: one sample per row, columns 0-2 are the inputs, columns 3-5 the targets
data = genfromtxt('data.txt', delimiter=',', dtype='float32')
X = data[:,0:3]
Y = data[:,3:6]


# best cost found so far
cost = inf

#try:
#   parameters = genfromtxt('parameters.txt', delimiter=',', dtype='float32')
#   cost = cost_function(parameters, X, Y)
#except IOError:
#   pass

# random initialization: keep the best of 100 random parameter vectors in [-8, 8]
for i in range(100):
    tmpParams = (random.rand(7*3)*2-1)*8
    tmpCost = cost_function(tmpParams, X, Y)
    if tmpCost < cost:
        cost = tmpCost
        parameters = tmpParams
        print('Random Cost: ' + str(cost))
        savetxt('parameters.txt', parameters, delimiter=',')


# optimization: restart Nelder-Mead until it reports convergence
# (note: newer SciPy versions spell the tolerance option 'xatol' instead of 'xtol')
continueOptimization = True
while continueOptimization:
    res = minimize(cost_function, parameters, args=(X, Y), method='nelder-mead', options={'maxiter': 100, 'xtol': 1e-5})
    parameters = res.x
    print(res.fun)
    savetxt('parameters.txt', parameters, delimiter=',')
    continueOptimization = not res.success


print(res)
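
For reference, the loader at the top of the script expects data.txt to hold one comma-separated sample per row: three joint readings followed by the three measured output coordinates. The values below are made up, only the layout matters:

    0.12,0.85,-1.40,10.3,2.7,6.1
    0.50,0.10,0.75,9.8,4.4,5.2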