Advertisement
Guest User

Untitled

a guest
Jul 27th, 2017
69
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 1.67 KB | None | 0 0
# Model: log y[i,j,k] = alpha[i,k] + beta[j,k] + mu[k]
  2.  
import numpy as np
import pymc as pm
from pymc import Normal, Uniform, MvNormal, Exponential, Gamma, InverseGamma
from pymc import MCMC
  6.  
  7. mu = np.zeros(2, dtype=object)
  8. alpha = np.zeros([10,2], dtype = object)
  9. beta = np.zeros([10,2], dtype = object)
  10. for k in range(2):
  11. mu[k] = Normal('mu_{}'.format(k), 0,1000)
  12. for i in range(0,10):
  13. alpha[i][k] = Normal('alpha_{}_{}'.format(i,k), 0, 1000)
  14. beta[i][k] = Normal('beta_{}_{}'.format(i,k), 0, 1000)
  15. rho = Uniform('rho', lower = -1, upper = 1)
  16. sigma1 = InverseGamma('sigma1', 2.0001,1) #sigma squared
  17. sigma2 = InverseGamma('sigma2', 2.0001,1)
  18.  
  19. @pm.deterministic
  20. def PRECISION():
  21. PREC = [[sigma2/(sigma1*sigma2*(1-rho)),(-rho*
  22. (sigma1*sigma2)**0.5)/(sigma1*sigma2*(1-rho))],[(-rho*
  23. (sigma1*sigma2)**0.5)/(sigma1*sigma2*(1-rho)), sigma1/(sigma1*sigma2*(1-
  24. rho))]]
  25. return PREC
  26. mean = np.zeros([10,10,2])
  27. mean_list_1 = []
  28. mean_list_2 = []
  29. for i in range(10):
  30. for j in range(10):
  31. mean[i,j,0] = mu[0] + alpha[i][0] + beta[j][0]
  32. mean_list_1.append(mean[i,j,0])
  33. mean[i,j,1] = mu[1] + alpha[i][1] + beta[j][1]
  34. mean_list_2.append(mean[i,j,1])
  35.  
  36. #Restructure the vector
  37. bi_mean = np.zeros(55, dtype = object)
  38. bi_data = np.zeros(55, dtype = object)
  39. log_Y = np.zeros(55, dtype = object)
  40. for i in range(55):
  41. bi_mean[i] = [mean_list_1[i], mean_list_2[i]]
  42. bi_data[i] = [data[i], data[i+55]]
  43.  
  44. log_Y = [pm.MvNormal('log-Y_{}'.format(i), bi_mean[i], PRECISION, value =
  45. bi_data[i], observed = True) for i in range(55)]
  46.  
  47. monitor_list = [sigma1, sigma2, rho,mu, alpha, beta,log_Y]
  48. model = MCMC([monitor_list],calc_deviance=True)
  49. model.sample(iter=10000, burn=5000, thin=5)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement