python-oxweb/oxweb/wikipedia.py

# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from urllib import urlencode
import simplejson
from oxlib.cache import getUrl, getUrlUnicode
from oxlib import findRe, decodeHtml


def getMovieId(title, director='', year=''):
    # despite the name, this returns the URL of the best-matching film article,
    # or '' if nothing was found
    query = '"%s" film %s %s' % (title, director, year)
    result = find(query, 1)
    if result:
        return result[0][1]
    return ''

def getUrlByImdb(imdbId):
    # look for an article whose infobox contains the given IMDb id
    query = '"imdb_id = %s"' % imdbId
    result = find(query)
    if result:
        url = result[0][1]
        return url
    # no match: retry once with a leading zero stripped from the id
    if str(imdbId).startswith('0'):
        imdbId = imdbId[1:]
        return getUrlByImdb(imdbId)
    return ''

def getUrlByAmbId(amg_id):
    # look for an article whose infobox contains the given amg_id
    query = '"amg_id = %s"' % amg_id
    result = find(query)
    if result:
        url = result[0][1]
        return url
    return ''

def getWikiData(wikipediaUrl):
    # fetch the raw wiki markup of an article by scraping its edit page
    title = wikipediaUrl.replace('http://en.wikipedia.org/wiki/', '')
    url = "http://en.wikipedia.org/w/index.php?title=%s&action=edit" % title
    html = getUrlUnicode(url)
    data = decodeHtml(findRe(html, "<textarea.*?>(.*?)</textarea>"))
    return data

def getMovieData(wikipediaUrl):
    # parse the '{{Infobox Film ...}}' template of an article into a dict
    data = getWikiData(wikipediaUrl)
    filmbox_data = findRe(data, '''\{\{Infobox Film(.*?)\}\}''')
    filmbox = {}
    for row in filmbox_data.strip().split('|'):
        d = row.split('=')
        if len(d) == 2:
            key = d[0].strip()
            value = d[1].strip()
            filmbox[key] = value
    return filmbox

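# Illustrative sketch (not part of the original module): getMovieData() splits
# the captured infobox wikitext on '|' and then on '=', so a hypothetical
# article containing the snippet below would come back as a plain dict.
# Fields whose values themselves contain '|' or '=' (e.g. wiki links such as
# [[A|B]]) will not survive this simple split.
#
#   {{Infobox Film
#   | name     = Example Film
#   | director = Jane Doe
#   | amg_id   = 1:12345
#   }}
#
#   getMovieData('http://en.wikipedia.org/wiki/Example_Film')
#   -> {'name': 'Example Film', 'director': 'Jane Doe', 'amg_id': '1:12345'}
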
def getAmgId(wikipediaUrl):
    # convenience wrapper: return the amg_id field of an article's film infobox
    data = getMovieData(wikipediaUrl)
    return data.get('amg_id', '')

def find(query, max_results=10):
    # full-text search via the MediaWiki API; returns (title, url, '') tuples
    query = {'action': 'query', 'list': 'search', 'format': 'json',
             'srlimit': max_results, 'srwhat': 'text',
             'srsearch': query.encode('utf-8')}
    url = "http://en.wikipedia.org/w/api.php?" + urlencode(query)
    data = getUrl(url)
    if not data:
        data = getUrl(url, timeout=0)
    result = simplejson.loads(data)
    results = []
    for r in result['query']['search']:
        title = r['title']
        url = "http://en.wikipedia.org/wiki/%s" % title.replace(' ', '_')
        results.append((title, url, ''))
    return results
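
# Minimal usage sketch, not part of the original module; the title, director
# and year below are just example inputs, and the 2008-era Wikipedia endpoints
# used above may behave differently today.
if __name__ == '__main__':
    # look up a film article, then pull its infobox fields
    url = getMovieId('Casablanca', 'Michael Curtiz', '1942')
    if url:
        print url
        print getMovieData(url)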