Guest User

Untitled

a guest
Jul 18th, 2018
80
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 2.72 KB | None | 0 0
  1. shape, loc, scale = sm.lognorm.fit(dataToLearn, floc = 0)
  2.  
  3. for b in bounds:
  4. toPlot.append((b, currCount+sm.lognorm.ppf(b, s = shape, loc = loc, scale = scale)))
  5.  
  6. for i, d in enumerate(dataToLearn):
  7. dataToLearn2 += int(w[i] * 100) * [d]
  8.  
  9. import numpy as np
  10.  
  11. dataToLearn = np.array([1,2,3,4,5])
  12. weights = np.array([1,2,1,1,3])
  13.  
  14. print(np.repeat(dataToLearn, weights))
  15. # Output: array([1, 2, 2, 3, 4, 5, 5, 5])
  16.  
  17. import timeit
  18.  
  19. code_before = """
  20. weights = np.array([1,2,1,1,3] * 1000)
  21. dataToLearn = np.array([1,2,3,4,5] * 1000)
  22. dataToLearn2 = []
  23. for i, d in enumerate(dataToLearn):
  24. dataToLearn2 += int(weights[i]) * [d]
  25. """
  26.  
  27. code_after = """
  28. weights = np.array([1,2,1,1,3] * 1000)
  29. dataToLearn = np.array([1,2,3,4,5] * 1000)
  30. np.repeat(dataToLearn, weights)
  31. """
  32.  
  33. print(timeit.timeit(code_before, setup="import numpy as np", number=1000))
  34. print(timeit.timeit(code_after, setup="import numpy as np", number=1000))
  35.  
  36. import numpy as np
  37. from scipy.stats import lognorm
  38.  
  39.  
  40. # Sample data and weights. To enable an exact comparison with
  41. # the method of generating an array with the values repeated
  42. # according to their weight, I use an array of weights that is
  43. # all integers.
  44. x = np.array([2.5, 8.4, 9.3, 10.8, 6.8, 1.9, 2.0])
  45. w = np.array([ 1, 1, 2, 1, 3, 3, 1])
  46.  
  47.  
  48. #-----------------------------------------------------------------------------
  49. # Fit the log-normal distribution by creating an array containing the values
  50. # repeated according to their weight.
  51. xx = np.repeat(x, w)
  52.  
  53. # Use the explicit formulas for the MLE of the log-normal distribution.
  54. lnxx = np.log(xx)
  55. muhat = np.mean(lnxx)
  56. varhat = np.var(lnxx)
  57.  
  58. shape = np.sqrt(varhat)
  59. scale = np.exp(muhat)
  60.  
  61. print("MLE using repeated array: shape=%7.5f scale=%7.5f" % (shape, scale))
  62.  
  63.  
  64. #-----------------------------------------------------------------------------
  65. # Use the explicit formulas for the weighted MLE of the log-normal
  66. # distribution.
  67.  
  68. lnx = np.log(x)
  69. muhat = np.average(lnx, weights=w)
  70. # varhat is the weighted variance of ln(x). There isn't a function in
  71. # numpy for the weighted variance, so we compute it using np.average.
  72. varhat = np.average((lnx - muhat)**2, weights=w)
  73.  
  74. shape = np.sqrt(varhat)
  75. scale = np.exp(muhat)
  76.  
  77. print("MLE using weights: shape=%7.5f scale=%7.5f" % (shape, scale))
  78.  
  79.  
  80. #-----------------------------------------------------------------------------
  81. # Might as well check that we get the same result from lognorm.fit() using the
  82. # repeated array
  83.  
  84. shape, loc, scale = lognorm.fit(xx, floc=0)
  85.  
  86. print("MLE using lognorm.fit: shape=%7.5f scale=%7.5f" % (shape, scale))
  87.  
Output:
MLE using repeated array: shape=0.70423 scale=4.57740
MLE using weights: shape=0.70423 scale=4.57740
MLE using lognorm.fit: shape=0.70423 scale=4.57740
Add Comment
Please, Sign In to add comment