# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from urllib import urlencode
import simplejson
from oxlib.cache import readUrl, readUrlUnicode
from oxlib import findRe, decodeHtml
def getId(url):
    return url.split("/")[-1]

def getUrl(id):
    return "http://en.wikipedia.org/wiki/%s" % id

def getMovieId(title, director='', year=''):
    query = '"%s" film %s %s' % (title, director, year)
    result = find(query, 1)
    if result:
        # find() returns (title, url, '') tuples; the page URL is field 1
        return result[0][1]
    return ''

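# Illustrative use; the URL returned depends on Wikipedia's live search index:
#     >>> getMovieId('The Matrix', 'Wachowski', '1999')
#     'http://en.wikipedia.org/wiki/The_Matrix'
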
def getUrlByImdbId(imdbId):
    query = '"%s"' % imdbId
    result = find(query)
    if result:
        url = result[0][1]
        return url
    return ""

def getUrlByImdb(imdbId):
    # deprecated, use getUrlByImdbId()
    return getUrlByImdbId(imdbId)

def getUrlByAllmovieId(allmovieId):
    query = '"amg_id = 1:%s"' % allmovieId
    result = find(query)
    if result:
        url = result[0][1]
        return url
    return ''

def getWikiData(wikipediaUrl):
    # fetch the raw wikitext of the article via index.php?action=raw
    url = wikipediaUrl.replace('wikipedia.org/wiki/', 'wikipedia.org/w/index.php?title=')
    url = "%s&action=raw" % url
    data = readUrlUnicode(url)
    return data

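# Hedged sketch; this fetches live data, and film articles usually (but not
# always) open with their infobox:
#     >>> getWikiData(getUrl('Killer_of_Sheep'))[:9]
#     u'{{Infobox'
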
def getMovieData(wikipediaUrl):
    if not wikipediaUrl.startswith('http'):
        wikipediaUrl = getUrl(wikipediaUrl)
    data = getWikiData(wikipediaUrl)
    filmbox_data = findRe(data, r'\{\{Infobox.Film(.*?)\n\}\}')
    filmbox = {}
    # infobox rows are separated by '\n|' (or '|\n' in some articles)
    _box = filmbox_data.strip().split('\n|')
    if len(_box) == 1:
        _box = _box[0].split('|\n')
    for row in _box:
        d = row.split('=')
        if len(d) == 2:
            key = d[0].strip()
            # startswith() also copes with keys that are empty after strip()
            if key.startswith('|'):
                key = key[1:]
            value = d[1].strip()
            filmbox[key] = value
    # ids from external-link templates ({{imdb title}}, {{Amg movie}}, ...)
    if 'imdb title' in data:
        filmbox['imdb_id'] = findRe(data, r'imdb title\|.*?(\d*?)\|')
    elif 'imdb episode' in data:
        filmbox['imdb_id'] = findRe(data, r'imdb episode\|.*?(\d*?)\|')
    if 'Amg movie' in data:
        filmbox['amg_id'] = findRe(data, r'Amg movie\|.*?(\d*?)\|')
    if 'amg_id' in filmbox and filmbox['amg_id'].startswith('1:'):
        filmbox['amg_id'] = filmbox['amg_id'][2:]
    if 'rotten-tomatoes' in data:
        filmbox['rottentomatoes_id'] = findRe(data, r'rotten-tomatoes\|id=(.*?)\|')
        if not filmbox['rottentomatoes_id']:
            filmbox['rottentomatoes_id'] = findRe(data, r'rotten-tomatoes\|(.*?)\|')
    if 'google video' in data:
        filmbox['google_video_id'] = findRe(data, r'google video\|.*?(\d*?)\|')
    if 'DEFAULTSORT' in data:
        filmbox['title_sort'] = findRe(data, r'\{\{DEFAULTSORT:(.*?)\}\}')
    return filmbox

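# Sketch of the result shape; keys vary per article ('image', 'director' and
# the *_id fields are typical but not guaranteed), and the exact value shown
# here is indicative only:
#     >>> getMovieData(getUrl('Killer_of_Sheep')).get('director')
#     u'[[Charles Burnett (director)|Charles Burnett]]'
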
def getImageUrl(name):
    # resolve an Image: page to its direct upload.wikimedia.org file URL
    data = readUrlUnicode('http://en.wikipedia.org/wiki/Image:' + name)
    url = findRe(data, 'href="(http://upload.wikimedia.org/.*?)"')
    return url

def getPosterUrl(wikipediaUrl):
    if not wikipediaUrl.startswith('http'):
        wikipediaUrl = getUrl(wikipediaUrl)
    data = getMovieData(wikipediaUrl)
    if 'image' in data:
        return getImageUrl(data['image'])
    return ''

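# Illustrative only; returns '' when the article's infobox has no 'image'
# field, otherwise a direct upload.wikimedia.org URL:
#     >>> getPosterUrl('Killer_of_Sheep')
#     u'http://upload.wikimedia.org/wikipedia/en/...'
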
def getMoviePoster(wikipediaUrl):
    # deprecated, use getPosterUrl()
    return getPosterUrl(wikipediaUrl)

def getAllmovieId(wikipediaUrl):
    data = getMovieData(wikipediaUrl)
    return data.get('amg_id', '')

def find(query, max_results=10):
    query = {'action': 'query', 'list': 'search', 'format': 'json',
             'srlimit': max_results, 'srwhat': 'text',
             'srsearch': query.encode('utf-8')}
    url = "http://en.wikipedia.org/w/api.php?" + urlencode(query)
    data = readUrl(url)
    if not data:
        # empty (possibly cached) response: retry with cache timeout 0
        data = readUrl(url, timeout=0)
    result = simplejson.loads(data)
    results = []
    if result and 'query' in result:
        for r in result['query']['search']:
            title = r['title']
            url = "http://en.wikipedia.org/wiki/%s" % title.replace(' ', '_')
            results.append((title, url, ''))
    return results

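# A minimal smoke test, assuming network access and this module's Python 2
# dependencies (oxlib, simplejson); output depends on the live search index:
if __name__ == '__main__':
    for title, url, _ in find('"Killer of Sheep" film', 3):
        print '%s -> %s' % (title, url)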