cleanup pylint errors and py2/3 issues

j 2016-06-08 15:32:46 +02:00
commit 77f8876fca
20 changed files with 232 additions and 197 deletions

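Note on the first change in the hunk below: in Python 2, filter() returns a list, so the later `if score:` check works as intended; in Python 3, filter() returns a lazy filter object that is always truthy, even when nothing matched. Switching to a list comprehension keeps the truthiness check correct on both versions. A minimal standalone sketch of the difference (illustrative only, not code from this repository):

    # Illustrative only -- simplified stand-in for the attribute check in the patch.
    spans = []  # pretend no '//span[@class="score_value"]' element matched

    # Python 2: filter() returns a list, so an empty result is falsy.
    # Python 3: filter() returns a filter object, which is always truthy.
    score = filter(lambda s: s == 'v:average', spans)
    print(bool(score))  # True on Python 3, False on Python 2

    # The list comprehension behaves identically on both versions.
    score = [s for s in spans if s == 'v:average']
    print(bool(score))  # False everywhere
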
@@ -28,22 +28,32 @@ def get_show_url(title):
 def get_data(url):
     data = read_url(url, unicode=True)
     doc = document_fromstring(data)
-    score = filter(lambda s: s.attrib.get('property') == 'v:average',
-                   doc.xpath('//span[@class="score_value"]'))
+    score = [s for s in doc.xpath('//span[@class="score_value"]')
+             if s.attrib.get('property') == 'v:average']
     if score:
         score = int(score[0].text)
     else:
         score = -1
-    authors = [a.text
-               for a in doc.xpath('//div[@class="review_content"]//div[@class="author"]//a')]
-    sources = [d.text
-               for d in doc.xpath('//div[@class="review_content"]//div[@class="source"]/a')]
-    reviews = [d.text
-               for d in doc.xpath('//div[@class="review_content"]//div[@class="review_body"]')]
-    scores = [int(d.text.strip())
-              for d in doc.xpath('//div[@class="review_content"]//div[contains(@class, "critscore")]')]
-    urls = [a.attrib['href']
-            for a in doc.xpath('//div[@class="review_content"]//a[contains(@class, "external")]')]
+    authors = [
+        a.text
+        for a in doc.xpath('//div[@class="review_content"]//div[@class="author"]//a')
+    ]
+    sources = [
+        d.text
+        for d in doc.xpath('//div[@class="review_content"]//div[@class="source"]/a')
+    ]
+    reviews = [
+        d.text
+        for d in doc.xpath('//div[@class="review_content"]//div[@class="review_body"]')
+    ]
+    scores = [
+        int(d.text.strip())
+        for d in doc.xpath('//div[@class="review_content"]//div[contains(@class, "critscore")]')
+    ]
+    urls = [
+        a.attrib['href']
+        for a in doc.xpath('//div[@class="review_content"]//a[contains(@class, "external")]')
+    ]
     metacritics = []
     for i in range(len(authors)):
@@ -54,7 +64,7 @@ def get_data(url):
             'quote': strip_tags(reviews[i]).strip(),
             'score': scores[i],
         })
     return {
         'critics': metacritics,
         'id': get_id(url),