Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
# Fetch a page with Selenium-driven Firefox and save its rendered HTML to disk.
from selenium import webdriver

driver = webdriver.Firefox()
try:
    driver.get("https://www.website.com")
    # driver.page_source is a str in Python 3. The original wrote
    # page_source.encode('utf-8') (bytes) into a text-mode 'w' handle,
    # which raises TypeError on Python 3 — open with an explicit encoding
    # and write the str instead. The context manager also guarantees the
    # file is closed even if the write fails.
    with open('output.txt', 'w', encoding='utf-8') as f:
        f.write(driver.page_source)
finally:
    # Always shut the browser down, even when the fetch or write fails,
    # so no orphaned Firefox process is left behind.
    driver.quit()
# Build a mechanize browser session: ignore robots.txt, keep cookies,
# present a Mozilla user-agent, and skip meta-refresh redirects.
import mechanize

browser = mechanize.Browser()
browser.set_handle_robots(False)  # do not honour robots.txt

# Attach a cookie jar so the session persists cookies across requests.
cookies = mechanize.CookieJar()
browser.set_cookiejar(cookies)

# Some sites refuse requests without a browser-like user-agent header.
browser.addheaders = [('User-agent', 'Mozilla/5.0')]

browser.set_handle_refresh(False)  # ignore <meta http-equiv="refresh"> redirects
browser.open("https://www.website.com")
# Parse the mechanize response body with BeautifulSoup (lxml backend)
# and print the element whose id attribute is "div_id".
from bs4 import BeautifulSoup as BS

page_html = browser.response().read()
soup = BS(page_html, 'lxml')
target = soup.find(id="div_id")
print(target)
- <div id="div_id" data-referrer="div_id">
# Extract the <div id="div_id"> element from a previously saved HTML file
# using lxml's HTML parser and print its serialized form.
from lxml import etree

parser = etree.HTMLParser()
# Use a context manager so the file handle is closed even if parsing
# fails — the original passed a bare open() and leaked the handle.
with open('source.txt') as f:
    tree = etree.parse(f, parser)

results = tree.xpath('//div[@id="div_id"]')
# Guard the empty-match case instead of raising IndexError on results[0].
if results:
    print(etree.tostring(results[0]))
# Fetch the page with requests, spoofing a randomised Chrome user-agent
# so the site serves the same content it would to a real browser.
import requests
from fake_useragent import UserAgent

ua = UserAgent()
url = 'https://www.website.com'

# ua.chrome yields a random real-world Chrome UA string on each access.
headers = {'User-agent': str(ua.chrome)}

page = requests.get(url, headers=headers)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement