Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Model: log y[i,j,k] = mu[k] + alpha[i,k] + beta[j,k]
import numpy as np

import pymc as pm
from pymc import (MCMC, Exponential, Gamma, InverseGamma, MvNormal, Normal,
                  Uniform)
# --- Priors ---------------------------------------------------------------
# Overall mean (mu), row effects (alpha) and column effects (beta) for each
# of the two response components k = 0, 1.  The arrays use dtype=object
# because they hold PyMC stochastic nodes, not floats.
mu = np.zeros(2, dtype=object)
alpha = np.zeros([10, 2], dtype=object)
beta = np.zeros([10, 2], dtype=object)
for k in range(2):
    # NOTE(review): PyMC2's Normal takes *precision* (tau) as its second
    # positional argument, so tau=1000 is a very tight prior (sd ~ 0.03).
    # If a vague prior was intended, tau should be small (e.g. 0.001) —
    # confirm intent; original values preserved here.
    mu[k] = Normal('mu_{}'.format(k), 0, 1000)
    for i in range(10):
        alpha[i][k] = Normal('alpha_{}_{}'.format(i, k), 0, 1000)
        beta[i][k] = Normal('beta_{}_{}'.format(i, k), 0, 1000)

# Correlation between the two components and their variances.
# sigma1/sigma2 are variances ("sigma squared", per the original comment).
rho = Uniform('rho', lower=-1, upper=1)
sigma1 = InverseGamma('sigma1', 2.0001, 1)  # variance of component 1
sigma2 = InverseGamma('sigma2', 2.0001, 1)  # variance of component 2
@pm.deterministic
def PRECISION(rho=rho, sigma1=sigma1, sigma2=sigma2):
    """Precision (inverse covariance) matrix of the bivariate normal.

    With variances s1, s2 and correlation rho the covariance is
    [[s1, rho*sqrt(s1*s2)], [rho*sqrt(s1*s2), s2]], whose determinant is
    s1*s2*(1 - rho**2); the precision is (1/det) times
    [[s2, -rho*sqrt(s1*s2)], [-rho*sqrt(s1*s2), s1]].

    Fixes two defects in the original:
    * the parents (rho, sigma1, sigma2) are passed as default arguments so
      PyMC2 registers them as parents and re-evaluates this node each
      iteration — a zero-argument deterministic has no parents and would
      effectively be treated as constant;
    * the determinant used (1 - rho) instead of (1 - rho**2).
    """
    det = sigma1 * sigma2 * (1 - rho ** 2)
    off_diag = -rho * (sigma1 * sigma2) ** 0.5 / det
    return [[sigma2 / det, off_diag],
            [off_diag, sigma1 / det]]
# --- Cell means -----------------------------------------------------------
# mean[i, j, k] = mu[k] + alpha[i, k] + beta[j, k] is a PyMC Deterministic
# node, so the array must use dtype=object: the original float array
# (np.zeros([10, 10, 2])) cannot store PyMC node objects.
mean = np.zeros([10, 10, 2], dtype=object)
mean_list_1 = []
mean_list_2 = []
for i in range(10):
    for j in range(10):
        mean[i, j, 0] = mu[0] + alpha[i][0] + beta[j][0]
        mean_list_1.append(mean[i, j, 0])
        mean[i, j, 1] = mu[1] + alpha[i][1] + beta[j][1]
        mean_list_2.append(mean[i, j, 1])
# NOTE(review): 100 means are built here but only the first 55 are consumed
# below (55 = 10*11/2).  If the 55 observations correspond to the upper
# triangle i <= j, the inner loop should be `for j in range(i, 10)` —
# confirm against how `data` was generated.
# --- Restructure into 55 bivariate observations ---------------------------
bi_mean = np.zeros(55, dtype=object)
bi_data = np.zeros(55, dtype=object)
for i in range(55):
    # Pair the two components of observation i.
    bi_mean[i] = [mean_list_1[i], mean_list_2[i]]
    # assumes `data` stores component 1 in data[0:55] and component 2 in
    # data[55:110] — TODO confirm against how `data` is loaded.
    bi_data[i] = [data[i], data[i + 55]]

# Observed bivariate-normal likelihood for each observation.  (The original
# pre-allocated log_Y with np.zeros and immediately rebound it to this
# list; that dead initialization is dropped.)
log_Y = [pm.MvNormal('log-Y_{}'.format(i), bi_mean[i], PRECISION,
                     value=bi_data[i], observed=True)
         for i in range(55)]
# --- Fit ------------------------------------------------------------------
# Nodes handed to the sampler.  monitor_list is already a list, so it is
# passed directly; the original's extra [monitor_list] nesting was
# redundant (PyMC2 flattens nested containers, so behavior is unchanged).
monitor_list = [sigma1, sigma2, rho, mu, alpha, beta, log_Y]
model = MCMC(monitor_list, calc_deviance=True)
# 10k iterations, first 5k discarded as burn-in, keep every 5th sample.
model.sample(iter=10000, burn=5000, thin=5)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement