import re

import pandas as pd
import requests
from bs4 import BeautifulSoup

# WKN (German securities identifier) used as the column name for the scraped prices.
wkn = 'DBX0G9'
url = 'http://www.boerse-frankfurt.de/en/etfs/db+x+trackers+msci+world+information+technology+trn+index+ucits+etf+LU0540980496/price+turnover+history/historical+data'

page = 1
dates = []
prices = []
while True:
    # Note: "#page=" is a URL fragment, which requests does not send to the
    # server, so this may need to be replaced by the site's real paging parameter.
    soup = BeautifulSoup(requests.get(url + "#page=" + str(page)).text, 'html.parser')

    # Grab the date and price cells and strip all whitespace from their text.
    tmp_dates = soup.find_all('td', class_='column-date')
    tmp_dates = [re.sub(r'\s', '', d.get_text()) for d in tmp_dates]
    tmp_prices = soup.find_all('td', class_='column-price')
    tmp_prices = [float(re.sub(r'\s', '', p.get_text())) for p in tmp_prices]

    # Stop when a page returns no prices, or after the second page.
    if not tmp_prices or page > 2:
        break
    dates += tmp_dates
    prices += tmp_prices
    page += 1

# Build a DataFrame of prices indexed by date, with the WKN as the column name.
df = pd.DataFrame(index=dates)
df[wkn] = prices
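
A quick way to inspect and persist the result, assuming the scrape above succeeded. This is a minimal sketch, not part of the original paste; the CSV filename is an arbitrary choice.

# Show the first few scraped rows and write everything to a CSV file
# named after the WKN (hypothetical filename).
print(df.head())
df.to_csv(wkn + '.csv')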