update to tornado 4.0 and requests 2.3.0
parent 060f459965
commit f187000dc9
239 changed files with 19071 additions and 20369 deletions
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
 # GPL 2011
-__version__ = '2.1.644'
+__version__ = '2.1.651'
 
 import cache
 import js

@@ -233,7 +233,15 @@ class SQLiteCache(Cache):
         else:
             compressed = 0
         data = sqlite3.Binary(data)
-        t = (url_hash, domain, url, post_data, json.dumps(headers), created,
+
+        #fixme: this looks wrong
+        try:
+            _headers = json.dumps(headers)
+        except:
+            for h in headers:
+                headers[h] = headers[h].decode(detect_encoding(headers[h]))
+            _headers = json.dumps(headers)
+        t = (url_hash, domain, url, post_data, _headers, created,
             data, only_headers, compressed)
         c.execute(u"""INSERT OR REPLACE INTO cache values (?, ?, ?, ?, ?, ?, ?, ?, ?)""", t)

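The new block works around header values that json.dumps() cannot serialize directly: when the dump fails, each value is decoded with its detected encoding and the dump is retried. A minimal standalone sketch of that fallback, assuming Python 3 semantics (bytes values raise TypeError; the patched Python 2 code uses a bare except) and a stand-in for the module's detect_encoding helper; the sample header value is made up:

import json

def detect_encoding(value):
    # stand-in for the encoding detection helper used by the patched module
    return 'latin-1'

def dump_headers(headers):
    # Serialize headers to JSON; if a value is not serializable,
    # decode byte strings with their detected encoding and retry.
    try:
        return json.dumps(headers)
    except (TypeError, UnicodeDecodeError):
        for key in headers:
            if isinstance(headers[key], bytes):
                headers[key] = headers[key].decode(detect_encoding(headers[key]))
        return json.dumps(headers)

# made-up header value with a latin-1 byte that plain json.dumps rejects
print(dump_headers({'Content-Disposition': b'attachment; filename="caf\xe9.txt"'}))
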
@@ -16,7 +16,8 @@ __all__ = ['sha1sum', 'oshash', 'avinfo', 'makedirs']
 
 EXTENSIONS = {
     'audio': [
-        'aac', 'flac', 'm4a', 'mp3', 'oga', 'ogg', 'wav', 'wma'
+        'aac', 'aif', 'aiff',
+        'flac', 'm4a', 'mp3', 'oga', 'ogg', 'wav', 'wma'
     ],
     'image': [
         'bmp', 'gif', 'jpeg', 'jpg', 'png', 'svg', 'webp'
@@ -27,7 +28,9 @@ EXTENSIONS = {
     'video': [
         '3gp',
         'avi', 'divx', 'dv', 'flv', 'm2t', 'm4v', 'mkv', 'mov', 'mp4',
-        'mpeg', 'mpg', 'mts', 'ogm', 'ogv', 'rm', 'vob', 'webm', 'wmv'
+        'mpeg', 'mpg', 'mts', 'ogm', 'ogv', 'rm', 'vob', 'webm', 'wmv',
+        'mod', 'tod', # http://en.wikipedia.org/wiki/MOD_and_TOD
+        'mxf'
     ],
 }
 

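The two hunks above only extend the EXTENSIONS lists (aif/aiff for audio; mod, tod and mxf for video). As a rough illustration of how such a mapping can be used, here is a hypothetical lookup helper; media_type() is not part of the diff, and the trimmed mapping and sample filenames are made up:

import os

# trimmed copy of the mapping, just for this example
EXTENSIONS = {
    'audio': ['aac', 'aif', 'aiff', 'flac', 'mp3', 'ogg', 'wav', 'wma'],
    'video': ['avi', 'mkv', 'mod', 'mov', 'mp4', 'mxf', 'tod', 'webm'],
}

def media_type(filename):
    # hypothetical helper: map a file extension to its media category
    ext = os.path.splitext(filename)[1].lstrip('.').lower()
    for kind, extensions in EXTENSIONS.items():
        if ext in extensions:
            return kind
    return None

print(media_type('clip.MOD'))   # 'video' (covered after this commit)
print(media_type('song.aif'))   # 'audio' (covered after this commit)
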
@@ -39,7 +42,10 @@ def cmd(program):
 
 def _get_file_cache():
     import ox.cache
-    return os.path.join(ox.cache.cache_path(), 'files.sqlite')
+    path = ox.cache.cache_path()
+    if path.startswith('fs:'):
+        path = path[3:]
+    return os.path.join(path, 'files.sqlite')
 
 def cache(filename, type='oshash'):
     conn = sqlite3.connect(_get_file_cache(), timeout=10)

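The cache path may now carry an 'fs:' prefix, which _get_file_cache() strips before joining the sqlite filename. A tiny sketch of that normalization in isolation, with made-up paths:

import os

def file_cache_path(cache_path):
    # drop a leading 'fs:' marker, if present, before building the path
    if cache_path.startswith('fs:'):
        cache_path = cache_path[3:]
    return os.path.join(cache_path, 'files.sqlite')

print(file_cache_path('fs:/tmp/ox_cache'))  # /tmp/ox_cache/files.sqlite
print(file_cache_path('/tmp/ox_cache'))     # unchanged: /tmp/ox_cache/files.sqlite
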
@@ -3,7 +3,7 @@
 
 try:
     from django.utils import datetime
-except ImportError:
+except:
     from datetime import datetime
 
 try:

@@ -6,3 +6,27 @@ def find(query):
     print url
     data = json.loads(read_url(url))
     return data
+
+def authors_ol(authors):
+    r = []
+    for a in authors:
+        url = 'http://openlibrary.org%s.json' % a
+        data = json.loads(read_url(url))
+        r.append(data['name'])
+    return r
+
+def get_data(isbn):
+    data = {}
+    ol = find(isbn)
+    if ol['docs']:
+        d = ol['docs'][0]
+        data['title'] = d['title']
+        data['author'] = authors_ol(d['authors'])
+        data['work'] = d['key']
+        data['edition'] = d['edition_key'][0]
+        url = 'https://openlibrary.org/books/%s.json' % data['edition']
+        info = json.load(read_url(url))
+        data['pages'] = info['number_of_pages']
+        if 'dewey_decimal_class' in info:
+            data['classification'] = info['dewey_decimal_class'][0]
+    return data

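The added get_data() chains an Open Library search (find) with per-author and per-edition lookups and returns a flat dict. A hedged usage sketch, assuming the module lives at ox.web.openlibrary (the file path is not shown in this diff) and that network access is available; the ISBN is only an example:

from ox.web import openlibrary  # module path assumed, not confirmed by the diff

info = openlibrary.get_data('0140177396')  # example ISBN, illustrative only
# keys produced by the code above: title, author, work, edition, pages,
# plus classification when a Dewey decimal class is present
print(info.get('title'), info.get('pages'))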