Spark_Hbase.log — pasted by tariq786, Jan 30th, 2016
<pyspark.streaming.dstream.DStream object at 0x7fb069210ed0>
Traceback (most recent call last):
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
    r = self.func(t, *rdds)
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
    func = lambda t, rdd: old_func(rdd)
  File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
    datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
    keyConverter, valueConverter, True)
  File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
    self.target_id, self.name)
  File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
    format(target_id, '.', name), value)
Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
: java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
    at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
    at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
    at scala.util.Try$.apply(Try.scala:161)
    at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
    at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
    at scala.Option.map(Option.scala:145)
    at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
    at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
    at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
    at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
    at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
    at py4j.Gateway.invoke(Gateway.java:259)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:207)
    at java.lang.Thread.run(Thread.java:745)

[The traceback above then repeats verbatim, once per streaming batch, for the remainder of the log. The only variation is in the later occurrences, where the reflective Java frames change from "sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)" to "sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)" once the JVM compiles the reflective call. The final traceback is cut off at the ClassNotFoundException line, where the paste ends.]
  807. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  808. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  809. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  810. at java.lang.Class.forName0(Native Method)
  811. at java.lang.Class.forName(Class.java:348)
  812. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  813. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  814. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  815. at scala.util.Try$.apply(Try.scala:161)
  816. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  817. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  818. at scala.Option.map(Option.scala:145)
  819. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  820. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  821. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  822. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  823. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  824. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  825. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  826. at java.lang.reflect.Method.invoke(Method.java:498)
  827. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  828. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  829. at py4j.Gateway.invoke(Gateway.java:259)
  830. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  831. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  832. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  833. at java.lang.Thread.run(Thread.java:745)
  834.  
  835. Traceback (most recent call last):
  836. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  837. r = self.func(t, *rdds)
  838. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  839. func = lambda t, rdd: old_func(rdd)
  840. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  841. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  842. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  843. keyConverter, valueConverter, True)
  844. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  845. self.target_id, self.name)
  846. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  847. format(target_id, '.', name), value)
  848. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  849. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  850. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  851. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  852. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  853. at java.lang.Class.forName0(Native Method)
  854. at java.lang.Class.forName(Class.java:348)
  855. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  856. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  857. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  858. at scala.util.Try$.apply(Try.scala:161)
  859. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  860. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  861. at scala.Option.map(Option.scala:145)
  862. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  863. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  864. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  865. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  866. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  867. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  868. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  869. at java.lang.reflect.Method.invoke(Method.java:498)
  870. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  871. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  872. at py4j.Gateway.invoke(Gateway.java:259)
  873. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  874. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  875. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  876. at java.lang.Thread.run(Thread.java:745)
  877.  
  878. Traceback (most recent call last):
  879. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  880. r = self.func(t, *rdds)
  881. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  882. func = lambda t, rdd: old_func(rdd)
  883. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  884. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  885. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  886. keyConverter, valueConverter, True)
  887. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  888. self.target_id, self.name)
  889. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  890. format(target_id, '.', name), value)
  891. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  892. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  893. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  894. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  895. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  896. at java.lang.Class.forName0(Native Method)
  897. at java.lang.Class.forName(Class.java:348)
  898. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  899. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  900. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  901. at scala.util.Try$.apply(Try.scala:161)
  902. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  903. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  904. at scala.Option.map(Option.scala:145)
  905. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  906. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  907. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  908. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  909. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  910. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  911. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  912. at java.lang.reflect.Method.invoke(Method.java:498)
  913. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  914. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  915. at py4j.Gateway.invoke(Gateway.java:259)
  916. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  917. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  918. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  919. at java.lang.Thread.run(Thread.java:745)
  920.  
  921. Traceback (most recent call last):
  922. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  923. r = self.func(t, *rdds)
  924. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  925. func = lambda t, rdd: old_func(rdd)
  926. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  927. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  928. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  929. keyConverter, valueConverter, True)
  930. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  931. self.target_id, self.name)
  932. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  933. format(target_id, '.', name), value)
  934. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  935. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  936. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  937. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  938. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  939. at java.lang.Class.forName0(Native Method)
  940. at java.lang.Class.forName(Class.java:348)
  941. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  942. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  943. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  944. at scala.util.Try$.apply(Try.scala:161)
  945. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  946. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  947. at scala.Option.map(Option.scala:145)
  948. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  949. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  950. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  951. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  952. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  953. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  954. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  955. at java.lang.reflect.Method.invoke(Method.java:498)
  956. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  957. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  958. at py4j.Gateway.invoke(Gateway.java:259)
  959. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  960. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  961. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  962. at java.lang.Thread.run(Thread.java:745)
  963.  
  964. Traceback (most recent call last):
  965. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  966. r = self.func(t, *rdds)
  967. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  968. func = lambda t, rdd: old_func(rdd)
  969. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  970. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  971. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  972. keyConverter, valueConverter, True)
  973. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  974. self.target_id, self.name)
  975. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  976. format(target_id, '.', name), value)
  977. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  978. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  979. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  980. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  981. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  982. at java.lang.Class.forName0(Native Method)
  983. at java.lang.Class.forName(Class.java:348)
  984. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  985. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  986. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  987. at scala.util.Try$.apply(Try.scala:161)
  988. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  989. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  990. at scala.Option.map(Option.scala:145)
  991. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  992. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  993. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  994. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  995. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  996. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  997. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  998. at java.lang.reflect.Method.invoke(Method.java:498)
  999. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1000. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1001. at py4j.Gateway.invoke(Gateway.java:259)
  1002. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1003. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1004. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1005. at java.lang.Thread.run(Thread.java:745)
  1006.  
  1007. Traceback (most recent call last):
  1008. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1009. r = self.func(t, *rdds)
  1010. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1011. func = lambda t, rdd: old_func(rdd)
  1012. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1013. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1014. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1015. keyConverter, valueConverter, True)
  1016. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1017. self.target_id, self.name)
  1018. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1019. format(target_id, '.', name), value)
  1020. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1021. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1022. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1023. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1024. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1025. at java.lang.Class.forName0(Native Method)
  1026. at java.lang.Class.forName(Class.java:348)
  1027. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1028. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1029. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1030. at scala.util.Try$.apply(Try.scala:161)
  1031. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1032. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1033. at scala.Option.map(Option.scala:145)
  1034. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1035. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1036. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1037. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1038. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1039. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1040. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1041. at java.lang.reflect.Method.invoke(Method.java:498)
  1042. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1043. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1044. at py4j.Gateway.invoke(Gateway.java:259)
  1045. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1046. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1047. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1048. at java.lang.Thread.run(Thread.java:745)
  1049.  
  1050. Traceback (most recent call last):
  1051. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1052. r = self.func(t, *rdds)
  1053. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1054. func = lambda t, rdd: old_func(rdd)
  1055. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1056. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1057. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1058. keyConverter, valueConverter, True)
  1059. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1060. self.target_id, self.name)
  1061. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1062. format(target_id, '.', name), value)
  1063. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1064. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1065. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1066. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1067. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1068. at java.lang.Class.forName0(Native Method)
  1069. at java.lang.Class.forName(Class.java:348)
  1070. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1071. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1072. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1073. at scala.util.Try$.apply(Try.scala:161)
  1074. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1075. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1076. at scala.Option.map(Option.scala:145)
  1077. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1078. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1079. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1080. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1081. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1082. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1083. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1084. at java.lang.reflect.Method.invoke(Method.java:498)
  1085. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1086. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1087. at py4j.Gateway.invoke(Gateway.java:259)
  1088. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1089. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1090. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1091. at java.lang.Thread.run(Thread.java:745)
  1092.  
  1093. Traceback (most recent call last):
  1094. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1095. r = self.func(t, *rdds)
  1096. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1097. func = lambda t, rdd: old_func(rdd)
  1098. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1099. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1100. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1101. keyConverter, valueConverter, True)
  1102. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1103. self.target_id, self.name)
  1104. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1105. format(target_id, '.', name), value)
  1106. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1107. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1108. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1109. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1110. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1111. at java.lang.Class.forName0(Native Method)
  1112. at java.lang.Class.forName(Class.java:348)
  1113. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1114. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1115. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1116. at scala.util.Try$.apply(Try.scala:161)
  1117. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1118. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1119. at scala.Option.map(Option.scala:145)
  1120. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1121. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1122. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1123. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1124. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1125. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1126. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1127. at java.lang.reflect.Method.invoke(Method.java:498)
  1128. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1129. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1130. at py4j.Gateway.invoke(Gateway.java:259)
  1131. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1132. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1133. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1134. at java.lang.Thread.run(Thread.java:745)
  1135.  
  1136. Traceback (most recent call last):
  1137. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1138. r = self.func(t, *rdds)
  1139. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1140. func = lambda t, rdd: old_func(rdd)
  1141. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1142. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1143. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1144. keyConverter, valueConverter, True)
  1145. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1146. self.target_id, self.name)
  1147. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1148. format(target_id, '.', name), value)
  1149. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1150. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1151. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1152. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1153. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1154. at java.lang.Class.forName0(Native Method)
  1155. at java.lang.Class.forName(Class.java:348)
  1156. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1157. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1158. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1159. at scala.util.Try$.apply(Try.scala:161)
  1160. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1161. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1162. at scala.Option.map(Option.scala:145)
  1163. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1164. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1165. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1166. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1167. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1168. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1169. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1170. at java.lang.reflect.Method.invoke(Method.java:498)
  1171. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1172. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1173. at py4j.Gateway.invoke(Gateway.java:259)
  1174. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1175. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1176. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1177. at java.lang.Thread.run(Thread.java:745)
  1178.  
  1179. Traceback (most recent call last):
  1180. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1181. r = self.func(t, *rdds)
  1182. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1183. func = lambda t, rdd: old_func(rdd)
  1184. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1185. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1186. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1187. keyConverter, valueConverter, True)
  1188. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1189. self.target_id, self.name)
  1190. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1191. format(target_id, '.', name), value)
  1192. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1193. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1194. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1195. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1196. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1197. at java.lang.Class.forName0(Native Method)
  1198. at java.lang.Class.forName(Class.java:348)
  1199. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1200. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1201. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1202. at scala.util.Try$.apply(Try.scala:161)
  1203. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1204. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1205. at scala.Option.map(Option.scala:145)
  1206. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1207. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1208. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1209. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1210. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1211. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1212. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1213. at java.lang.reflect.Method.invoke(Method.java:498)
  1214. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1215. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1216. at py4j.Gateway.invoke(Gateway.java:259)
  1217. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1218. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1219. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1220. at java.lang.Thread.run(Thread.java:745)
  1221.  
  1222. Traceback (most recent call last):
  1223. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1224. r = self.func(t, *rdds)
  1225. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1226. func = lambda t, rdd: old_func(rdd)
  1227. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1228. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1229. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1230. keyConverter, valueConverter, True)
  1231. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1232. self.target_id, self.name)
  1233. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1234. format(target_id, '.', name), value)
  1235. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1236. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1237. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1238. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1239. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1240. at java.lang.Class.forName0(Native Method)
  1241. at java.lang.Class.forName(Class.java:348)
  1242. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1243. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1244. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1245. at scala.util.Try$.apply(Try.scala:161)
  1246. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1247. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1248. at scala.Option.map(Option.scala:145)
  1249. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1250. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1251. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1252. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1253. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1254. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1255. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1256. at java.lang.reflect.Method.invoke(Method.java:498)
  1257. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1258. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1259. at py4j.Gateway.invoke(Gateway.java:259)
  1260. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1261. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1262. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1263. at java.lang.Thread.run(Thread.java:745)
  1264.  
  1265. Traceback (most recent call last):
  1266. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1267. r = self.func(t, *rdds)
  1268. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1269. func = lambda t, rdd: old_func(rdd)
  1270. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1271. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1272. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1273. keyConverter, valueConverter, True)
  1274. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1275. self.target_id, self.name)
  1276. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1277. format(target_id, '.', name), value)
  1278. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1279. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1280. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1281. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1282. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1283. at java.lang.Class.forName0(Native Method)
  1284. at java.lang.Class.forName(Class.java:348)
  1285. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1286. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1287. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1288. at scala.util.Try$.apply(Try.scala:161)
  1289. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1290. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1291. at scala.Option.map(Option.scala:145)
  1292. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1293. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1294. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1295. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1296. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1297. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1298. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1299. at java.lang.reflect.Method.invoke(Method.java:498)
  1300. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1301. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1302. at py4j.Gateway.invoke(Gateway.java:259)
  1303. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1304. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1305. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1306. at java.lang.Thread.run(Thread.java:745)
  1307.  
  1308. Traceback (most recent call last):
  1309. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1310. r = self.func(t, *rdds)
  1311. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1312. func = lambda t, rdd: old_func(rdd)
  1313. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1314. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1315. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1316. keyConverter, valueConverter, True)
  1317. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1318. self.target_id, self.name)
  1319. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1320. format(target_id, '.', name), value)
  1321. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1322. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1323. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1324. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1325. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1326. at java.lang.Class.forName0(Native Method)
  1327. at java.lang.Class.forName(Class.java:348)
  1328. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1329. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1330. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1331. at scala.util.Try$.apply(Try.scala:161)
  1332. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1333. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1334. at scala.Option.map(Option.scala:145)
  1335. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1336. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1337. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1338. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1339. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1340. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1341. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1342. at java.lang.reflect.Method.invoke(Method.java:498)
  1343. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1344. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1345. at py4j.Gateway.invoke(Gateway.java:259)
  1346. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1347. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1348. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1349. at java.lang.Thread.run(Thread.java:745)
  1350.  
  1351. Traceback (most recent call last):
  1352. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1353. r = self.func(t, *rdds)
  1354. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1355. func = lambda t, rdd: old_func(rdd)
  1356. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1357. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1358. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1359. keyConverter, valueConverter, True)
  1360. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1361. self.target_id, self.name)
  1362. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1363. format(target_id, '.', name), value)
  1364. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1365. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1366. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1367. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1368. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1369. at java.lang.Class.forName0(Native Method)
  1370. at java.lang.Class.forName(Class.java:348)
  1371. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1372. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1373. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1374. at scala.util.Try$.apply(Try.scala:161)
  1375. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1376. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1377. at scala.Option.map(Option.scala:145)
  1378. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1379. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1380. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1381. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1382. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1383. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1384. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1385. at java.lang.reflect.Method.invoke(Method.java:498)
  1386. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1387. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1388. at py4j.Gateway.invoke(Gateway.java:259)
  1389. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1390. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1391. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1392. at java.lang.Thread.run(Thread.java:745)
  1393.  
  1394. Traceback (most recent call last):
  1395. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1396. r = self.func(t, *rdds)
  1397. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1398. func = lambda t, rdd: old_func(rdd)
  1399. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1400. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1401. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1402. keyConverter, valueConverter, True)
  1403. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1404. self.target_id, self.name)
  1405. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1406. format(target_id, '.', name), value)
  1407. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1408. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1409. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1410. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1411. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1412. at java.lang.Class.forName0(Native Method)
  1413. at java.lang.Class.forName(Class.java:348)
  1414. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1415. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1416. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1417. at scala.util.Try$.apply(Try.scala:161)
  1418. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1419. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1420. at scala.Option.map(Option.scala:145)
  1421. at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
  1422. at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
  1423. at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
  1424. at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
  1425. at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
  1426. at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
  1427. at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  1428. at java.lang.reflect.Method.invoke(Method.java:498)
  1429. at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
  1430. at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
  1431. at py4j.Gateway.invoke(Gateway.java:259)
  1432. at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
  1433. at py4j.commands.CallCommand.execute(CallCommand.java:79)
  1434. at py4j.GatewayConnection.run(GatewayConnection.java:207)
  1435. at java.lang.Thread.run(Thread.java:745)
  1436.  
  1437. Traceback (most recent call last):
  1438. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
  1439. r = self.func(t, *rdds)
  1440. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
  1441. func = lambda t, rdd: old_func(rdd)
  1442. File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
  1443. datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  1444. File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
  1445. keyConverter, valueConverter, True)
  1446. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
  1447. self.target_id, self.name)
  1448. File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
  1449. format(target_id, '.', name), value)
  1450. Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
  1451. : java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
  1452. at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  1453. at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  1454. at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  1455. at java.lang.Class.forName0(Native Method)
  1456. at java.lang.Class.forName(Class.java:348)
  1457. at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
  1458. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
  1459. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
  1460. at scala.util.Try$.apply(Try.scala:161)
  1461. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
  1462. at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
  1463. at scala.Option.map(Option.scala:145)
    at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
    at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
    at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
    at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
    at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
    at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
    at py4j.Gateway.invoke(Gateway.java:259)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:207)
    at java.lang.Thread.run(Thread.java:745)

Traceback (most recent call last):
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/util.py", line 62, in call
    r = self.func(t, *rdds)
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/dstream.py", line 159, in <lambda>
    func = lambda t, rdd: old_func(rdd)
  File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 25, in SaveRecord
    datamap.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/rdd.py", line 1348, in saveAsNewAPIHadoopDataset
    keyConverter, valueConverter, True)
  File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 538, in __call__
    self.target_id, self.name)
  File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py", line 300, in get_return_value
    format(target_id, '.', name), value)
Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset.
: java.lang.ClassNotFoundException: org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at org.apache.spark.util.Utils$.classForName(Utils.scala:173)
    at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:46)
    at org.apache.spark.api.python.Converter$$anonfun$getInstance$1$$anonfun$1.apply(PythonHadoopUtil.scala:45)
    at scala.util.Try$.apply(Try.scala:161)
    at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:45)
    at org.apache.spark.api.python.Converter$$anonfun$getInstance$1.apply(PythonHadoopUtil.scala:44)
    at scala.Option.map(Option.scala:145)
    at org.apache.spark.api.python.Converter$.getInstance(PythonHadoopUtil.scala:44)
    at org.apache.spark.api.python.PythonRDD$.getKeyValueConverters(PythonRDD.scala:691)
    at org.apache.spark.api.python.PythonRDD$.convertRDD(PythonRDD.scala:704)
    at org.apache.spark.api.python.PythonRDD$.saveAsHadoopDataset(PythonRDD.scala:801)
    at org.apache.spark.api.python.PythonRDD.saveAsHadoopDataset(PythonRDD.scala)
    at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
    at py4j.Gateway.invoke(Gateway.java:259)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.GatewayConnection.run(GatewayConnection.java:207)
    at java.lang.Thread.run(Thread.java:745)

[The identical Py4JJavaError / ClassNotFoundException traceback recurs for every subsequent streaming batch until the job is interrupted.]
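
Root cause: the JVM-side ClassNotFoundException shows that the example converter classes under org.apache.spark.examples.pythonconverters are not on the driver/executor classpath. They ship in the spark-examples jar rather than in Spark's core assembly, so the job has to be launched with that jar attached, e.g. spark-submit --jars /usr/local/spark/lib/spark-examples-*.jar sp_json.py (the exact jar file name depends on the installed Spark build). The name mismatch in the trace (Python calls saveAsNewAPIHadoopDataset, the JVM reports saveAsHadoopDataset) is expected: the same JVM helper serves both the old- and new-API save paths. Below is a minimal sketch of the write path that sp_json.py line 25 appears to be executing, modeled on Spark's hbase_outputformat.py example; the table name, ZooKeeper quorum, and record layout are assumptions, not taken from this log.

    # Hypothetical reconstruction of SaveRecord() from sp_json.py, wired in
    # via dstream.foreachRDD(SaveRecord). Table name, quorum, and the
    # (rowkey, [rowkey, family, qualifier, value]) shape are assumptions.
    def SaveRecord(rdd):
        keyConv = "org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter"
        valueConv = "org.apache.spark.examples.pythonconverters.StringListToPutConverter"
        conf = {
            "hbase.zookeeper.quorum": "localhost",     # assumed quorum
            "hbase.mapred.outputtable": "test_table",  # assumed table
            "mapreduce.outputformat.class": "org.apache.hadoop.hbase.mapreduce.TableOutputFormat",
            "mapreduce.job.output.key.class": "org.apache.hadoop.hbase.io.ImmutableBytesWritable",
            "mapreduce.job.output.value.class": "org.apache.hadoop.io.Writable",
        }
        # Each element must be (rowkey, [rowkey, column_family, qualifier, value])
        # so the converters can build the ImmutableBytesWritable key and the Put.
        datamap = rdd.map(lambda x: (str(x[0]), [str(x[0]), "cf", "col", str(x[1])]))
        datamap.saveAsNewAPIHadoopDataset(conf=conf, keyConverter=keyConv, valueConverter=valueConv)

Until the converter jar is on the classpath, the converter lookup fails before any HBase write is attempted, which is why every batch above dies at the same point.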
Traceback (most recent call last):
  File "/home/ubuntu/unix_practice/spark-example/sp_json.py", line 39, in <module>
    ssc.awaitTermination() # Wait for the computation to terminate
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/streaming/context.py", line 247, in awaitTermination
  File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 536, in __call__
  File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 364, in send_command
  File "/usr/local/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py", line 473, in send_command
  File "/usr/lib/python2.7/socket.py", line 430, in readline
    data = recv(1)
KeyboardInterrupt
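
The KeyboardInterrupt above is not a Spark failure: it is the manual Ctrl-C that broke ssc.awaitTermination() out of its blocking read on the Py4J gateway socket. For a cleaner shutdown, a minimal sketch assuming the same ssc StreamingContext from sp_json.py:

    # Minimal sketch, assuming the `ssc` StreamingContext from sp_json.py.
    # Catch Ctrl-C and stop the streaming job explicitly instead of dying
    # inside the blocked gateway socket read.
    ssc.start()
    try:
        ssc.awaitTermination()  # blocks until stopped or interrupted
    except KeyboardInterrupt:
        pass
    finally:
        # stopGraceFully lets in-flight batches finish before shutdown
        ssc.stop(stopSparkContext=True, stopGraceFully=True)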