// Attempt 1: options map with a partitioned read. The connection itself
// times out, producing the communications link failure below.
val jdbcDF = spark.read.format("jdbc").options(
  Map("url" -> "jdbc:mysql://our-host.readm.co.nz:3306/testdb?user=testuser&password=****",
    "dbtable" -> "testdb.tablename",
    "driver" -> "com.mysql.jdbc.Driver",
    "partitionColumn" -> "id", "lowerBound" -> "1", "upperBound" -> "41514638", "numPartitions" -> "20"
  )).load()

com.mysql.jdbc.CommunicationsException: Communications link failure due to underlying exception:
** BEGIN NESTED EXCEPTION **
java.net.SocketException
MESSAGE: java.net.ConnectException: Connection timed out (Connection timed out)
STACKTRACE:
java.net.SocketException: java.net.ConnectException: Connection timed out (Connection timed out)
at com.mysql.jdbc.StandardSocketFactory.connect(StandardSocketFactory.java:156)
at com.mysql.jdbc.MysqlIO.<init>(MysqlIO.java:276)
at com.mysql.jdbc.Connection.createNewIO(Connection.java:2666)
at com.mysql.jdbc.Connection.<init>(Connection.java:1531)
at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:266)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:61)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:52)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:58)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:114)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:307)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:178)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:146)
at <init>(<console>:50)
at <init>(<console>:55)
at <init>(<console>:57)
at <init>(<console>:59)
at <init>(<console>:61)
at <init>(<console>:63)
at <init>(<console>:65)
at <init>(<console>:67)
at <init>(<console>:69)
at <init>(<console>:71)
at <init>(<console>:73)
at <init>(<console>:75)
at <init>(<console>:77)
at <init>(<console>:79)
at <init>(<console>:81)
at <init>(<console>:83)
at <init>(<console>:85)
at <init>(<console>:87)
at <init>(<console>:89)
at <init>(<console>:91)
at <init>(<console>:93)
at .<init>(<console>:97)
at .<clinit>(<console>)
at .$print$lzycompute(<console>:7)
at .$print(<console>:6)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
at sun.reflect.GeneratedMethodAccessor369.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.zeppelin.spark.Utils.invokeMethod(Utils.java:38)
at org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:1000)
at org.apache.zeppelin.spark.SparkInterpreter.interpretInput(SparkInterpreter.java:1205)
at org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:1172)
at org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:1165)
at org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:97)
at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:498)
at org.apache.zeppelin.scheduler.Job.run(Job.java:175)
at org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:139)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
** END NESTED EXCEPTION **
Last packet sent to the server was 1 ms ago.
at com.mysql.jdbc.Connection.createNewIO(Connection.java:2741)
at com.mysql.jdbc.Connection.<init>(Connection.java:1531)
at com.mysql.jdbc.NonRegisteringDriver.connect(NonRegisteringDriver.java:266)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:61)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$$anonfun$createConnectionFactory$1.apply(JdbcUtils.scala:52)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:58)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:114)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:307)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:178)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:146)
... 58 elided

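This first failure is network-level, not a Spark problem: the driver's TCP connect to port 3306 times out before MySQL ever answers, which typically points at a firewall or security-group rule, or a bind-address restriction on the MySQL server. A quick way to confirm this from the machine the Spark driver runs on is a plain socket probe. This is a minimal sketch (host and port taken from the URL above; the 5-second timeout is an arbitrary choice):

import java.net.{InetSocketAddress, Socket}

// Minimal reachability probe for the MySQL host/port used above.
// If this also times out, the problem is network/firewall, not Spark or JDBC.
val socket = new Socket()
try {
  socket.connect(new InetSocketAddress("our-host.readm.co.nz", 3306), 5000) // 5 s timeout (arbitrary)
  println("TCP connect OK - port 3306 is reachable")
} catch {
  case e: java.io.IOException => println(s"TCP connect failed: ${e.getMessage}")
} finally {
  socket.close()
}
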
// Attempt 2: per-option builder. Fails with the NullPointerException below.
val partitionColumn = "id"
val dbUrl = "jdbc:mysql://our-host.readm.co.nz:3306"   // note: no database in the URL path

val lowerBound = 1
val upperBound = 41514638

val jdbcDF = spark.read.format("jdbc")
  .option("url", dbUrl)
  .option("databaseName", "testdb")   // not an option the Spark JDBC source defines
  .option("dbtable", "tablename")
  .option("user", "username")
  .option("password", "*****")
  .option("driver", "com.mysql.jdbc.Driver")
  .option("partitionColumn", partitionColumn)
  .option("lowerBound", lowerBound)
  .option("upperBound", upperBound)
  .option("numPartitions", 20)
  .load()

java.lang.NullPointerException
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:72)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:114)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:307)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:178)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:146)
... 58 elided

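This NullPointerException has a different root cause than the timeout: the URL in this attempt names no database, and "databaseName" is not an option the Spark JDBC source defines, so the connection has no default schema when Spark probes the table to resolve its columns, and resolveTable blows up. The usual fix is to put the database in the JDBC URL path (or to qualify dbtable as testdb.tablename, as the first attempt did). A hedged rework of attempt 2, with the database moved into the URL (credentials and bounds kept as placeholders):

// Sketch: same partitioned read, but with the database in the URL path.
val dbUrl = "jdbc:mysql://our-host.readm.co.nz:3306/testdb"

val jdbcDF = spark.read.format("jdbc")
  .option("url", dbUrl)
  .option("dbtable", "tablename")          // resolved against testdb from the URL
  .option("user", "username")
  .option("password", "*****")
  .option("driver", "com.mysql.jdbc.Driver")
  .option("partitionColumn", "id")
  .option("lowerBound", 1L)
  .option("upperBound", 41514638L)
  .option("numPartitions", 20)
  .load()
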
// Attempt 3: spark.read.jdbc with a Properties object. Same NullPointerException.
import java.util.Properties

val table = "tablename"
val url = "jdbc:mysql://our-host.readm.co.nz"   // note: no port and no database in the URL
val prop = new Properties()
prop.put("driver", "com.mysql.jdbc.Driver")
prop.put("user", "username")
prop.put("password", "****")
prop.put("databaseName", "testdb")   // apparently not a key Connector/J reads a default schema from

val jdbcDF = spark.read.jdbc(url, table, prop)

java.lang.NullPointerException
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:72)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:114)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:307)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:178)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:146)
at org.apache.spark.sql.DataFrameReader.jdbc(DataFrameReader.scala:193)
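... 58 elided

The Properties variant fails the same way, and for the same reason: this URL is missing both the port and the database, and "databaseName" in the Properties object does not appear to be a key MySQL Connector/J honours for selecting a default schema. A minimal corrected sketch of the same call (assuming the connectivity problem from the first trace has been fixed first):

import java.util.Properties

// Sketch: port and database go in the URL; only driver credentials in Properties.
val url = "jdbc:mysql://our-host.readm.co.nz:3306/testdb"
val prop = new Properties()
prop.put("driver", "com.mysql.jdbc.Driver")
prop.put("user", "username")
prop.put("password", "****")

val jdbcDF = spark.read.jdbc(url, "tablename", prop)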