Dec-21-2017, 12:22 PM
def getdata(url, values=None):
    """POST *values* to *url* and return the response body as text.

    Parameters:
        url: endpoint to POST to.
        values: optional mapping of form data sent as the request body.

    Returns:
        The decoded response text.
    """
    # Use the Response as a context manager so the underlying connection is
    # released even if reading .text raises — the original only closed the
    # response on the success path.
    with requests.post(url, data=values, timeout=10) as r:
        return r.text
import csv

import requests
from bs4 import BeautifulSoup


def get(urls):
    """Yield the decoded HTML body of each URL in *urls*."""
    for url in urls:
        yield requests.get(url).content.decode('utf-8')


with open('BOOK.csv') as csv_:
    reader = csv.reader(csv_)
    # BUG FIX: the original comprehension iterated over `urls` before it was
    # defined (NameError). The rows come from `reader`; column 1 holds the URL.
    urls = [line[1] for line in reader if line]

webpages = list(get(urls))
for html in webpages:
    soup = BeautifulSoup(html, 'lxml')
    # BUG FIX: `soup.pretify` was a typo for `prettify` and was never called —
    # it would have raised AttributeError instead of printing formatted HTML.
    print(soup.prettify())