from pyspark.sql import SparkSession

hostname = 'localhost'
jdbcPort = 3306
dbname = 'db'
username = 'user'
password = 'password'

# Build the full connection string; the bare "jdbc:mysql://" prefix on its
# own is not a usable JDBC URL.
url = "jdbc:mysql://{0}:{1}/{2}".format(hostname, jdbcPort, dbname)

connectionProperties = {
    "user": username,
    "password": password,
    # For SQL Server, also pass the driver class:
    # "driver": "com.microsoft.sqlserver.jdbc.SQLServerDriver"
}

# spark has to be a SparkSession *instance*; pointing the name at the
# SparkSession class (or leaving it undefined) is what raises the
# AttributeError below, because SparkSession.read on the class is a bare
# property descriptor.
spark = SparkSession.builder.getOrCreate()

# A pushed-down query goes in as an aliased subquery (no trailing semicolon),
# and the DataFrameReader.jdbc() keyword is `table`, not `dbtable`.
pushdown_query = "(select * from table LIMIT 10) AS q"
df = spark.read.jdbc(url=url, table=pushdown_query, properties=connectionProperties)

# The commented-out SQLContext(sc) route in the original is the legacy
# pre-2.0 API; SparkSession replaces it.
display(df)  # Databricks-only helper; df.show(10) works anywhere
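For databases that need an explicit driver class, such as the SQL Server case hinted at in the comments, the options-based form of the JDBC reader is an equivalent way to pass it. A minimal sketch reusing the same placeholder connection values, assuming the matching JDBC driver jar is already on Spark's classpath:

from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

# "url", "dbtable", "user", "password" and "driver" are the standard
# option names of the built-in JDBC data source.
df = (
    spark.read.format("jdbc")
    .option("url", "jdbc:mysql://localhost:3306/db")
    .option("dbtable", "(select * from table LIMIT 10) AS q")   # aliased subquery
    .option("user", "user")
    .option("password", "password")
    # .option("driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver")  # SQL Server
    .load()
)
df.show()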
With spark bound to the SparkSession class (or never created at all) instead of a live session, either read fails before reaching the database:

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-21-70890f1cf807> in <module>()
     15 }
     16 pushdown_query = "select * from table LIMIT 10;"
---> 17 df = spark.read.jdbc(url=url, dbtable=pushdown_query, properties=connectionProperties)
     18 #sqlContext=SQLContext(sc)
     19 #df=sqlContext.read.jdbc(url=url, table=pushdown_query, properties=properties)

AttributeError: 'property' object has no attribute 'jdbc'
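The 'property' object in the message is the clue: SparkSession.read is defined as a Python property, so accessing it on the class (or on any name rebound to the class) yields the bare descriptor, which has no jdbc attribute. A minimal sketch of the distinction, assuming nothing beyond a local PySpark install:

from pyspark.sql import SparkSession

# On the class, `read` is the raw property descriptor -- exactly the
# object the traceback complains about.
print(type(SparkSession.read))       # <class 'property'>

# On an instance, the property resolves to a DataFrameReader, which
# does expose .jdbc(), .csv(), .parquet(), and friends.
spark = SparkSession.builder.master("local[1]").getOrCreate()
print(type(spark.read).__name__)     # DataFrameReader
print(hasattr(spark.read, "jdbc"))   # True
spark.stop()

On Databricks the notebook normally pre-defines spark as a live session, so hitting this error there usually means the name was shadowed later, e.g. by an assignment like spark = SparkSession.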