pull python-ox changes
This commit is contained in:
parent a04e0ede04
commit 7040fdd564
13 changed files with 75 additions and 57 deletions
@@ -1,7 +1,11 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
 # GPL 2011
-__version__ = '2.1.1'
+try:
+    from . import __version
+    __version__ = __version.VERSION
+except:
+    __version__ = '2.1.x'
 
 from . import cache
 from . import js
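Note: the new version logic prefers a build-generated module and falls back to a placeholder. A minimal standalone sketch of the same pattern (the _version module name here is illustrative, not the one above):

    # sketch: resolve a package version from a generated module, with a fallback
    try:
        from _version import VERSION  # a build step writes VERSION="..." here
        __version__ = VERSION
    except ImportError:
        __version__ = '2.1.x'  # placeholder when no generated module exists

    print(__version__)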
Shared/lib/python3.4/site-packages/ox/__version.py (new file)
@@ -0,0 +1 @@
+VERSION="2.1.670"
@@ -5,7 +5,7 @@ from __future__ import with_statement
 from six.moves import http_cookiejar as cookielib
 import gzip
-from six import StringIO
+from six import BytesIO, PY2
 from six.moves import urllib
 from types import MethodType
 
 
@@ -30,7 +30,7 @@ class API(object):
         else:
             self._cj = cookielib.CookieJar()
         self._opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self._cj),
-                urllib.HTTPHandler(debuglevel=self.debuglevel))
+                urllib.request.HTTPHandler(debuglevel=self.debuglevel))
         self._opener.addheaders = [
             ('User-Agent', '%s/%s' % (self.__name__, self.__version__))
         ]
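Note: the fix points HTTPHandler at urllib.request, where the six shim actually exposes it on both Pythons. A minimal opener built the same way (header values are examples):

    from six.moves import http_cookiejar as cookielib
    from six.moves import urllib

    cj = cookielib.CookieJar()
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cj),
        urllib.request.HTTPHandler(debuglevel=0))
    opener.addheaders = [('User-Agent', 'demo/1.0')]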
@@ -45,7 +45,10 @@ class API(object):
     def _add_method(self, method, name):
         if name is None:
             name = method.func_name
-        setattr(self, name, MethodType(method, self, type(self)))
+        if PY2:
+            setattr(self, name, MethodType(method, self, type(self)))
+        else:
+            setattr(self, name, MethodType(method, self))
 
     def _add_action(self, action):
         def method(self, *args, **kw):
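Note: types.MethodType lost its third (class) argument in Python 3, which is what this branch handles. A runnable sketch of binding a plain function as an instance method under both interpreters (class and function names are made up):

    from types import MethodType

    from six import PY2

    class Client(object):
        pass

    def hello(self, name):
        return 'hello %s from %s' % (name, type(self).__name__)

    client = Client()
    if PY2:
        # Python 2 bound methods also carry the owning class
        client.hello = MethodType(hello, client, Client)
    else:
        # Python 3 binds function + instance only
        client.hello = MethodType(hello, client)

    print(client.hello('world'))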
@@ -57,22 +60,30 @@ class API(object):
             return self._request(action, kw)
         if 'doc' in self._properties[action]:
             method.__doc__ = self._properties[action]['doc']
-        method.func_name = str(action)
+        if PY2:
+            method.func_name = str(action)
+        else:
+            method.func_name = action
         self._add_method(method, action)
 
     def _json_request(self, url, form):
         result = {}
         try:
-            body = str(form)
-            request = urllib.reuqest.Request(str(url))
-            request.add_header('Content-type', form.get_content_type())
+            body = form.body()
+            if PY2:
+                if not isinstance(url, bytes):
+                    url = url.encode('utf-8')
+                request = urllib.request.Request(url)
+                request.add_data(body)
+            else:
+                request = urllib.request.Request(url, data=body, method='POST')
+            request.add_header('Content-Type', form.get_content_type())
             request.add_header('Content-Length', str(len(body)))
             request.add_header('Accept-Encoding', 'gzip, deflate')
-            request.add_data(body)
             f = self._opener.open(request)
             result = f.read()
             if f.headers.get('content-encoding', None) == 'gzip':
-                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
+                result = gzip.GzipFile(fileobj=BytesIO(result)).read()
             result = result.decode('utf-8')
             return json.loads(result)
         except urllib.error.HTTPError as e:
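Note: the response path now stays in bytes until the very end: gunzip via BytesIO, decode to text, then json.loads. A self-contained sketch of that chain (the gzipped payload is fabricated locally instead of fetched):

    import gzip
    import json

    from six import BytesIO

    # stand-in for a gzip-encoded HTTP body
    buf = BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb') as gz:
        gz.write(json.dumps({'status': 'ok'}).encode('utf-8'))
    raw = buf.getvalue()

    result = gzip.GzipFile(fileobj=BytesIO(raw)).read()  # bytes -> bytes
    result = result.decode('utf-8')                      # bytes -> text
    print(json.loads(result))                            # {'status': 'ok'}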
@@ -10,6 +10,7 @@ import os
 from six import BytesIO
 import time
 from six.moves import urllib
+from six import PY2
 import sqlite3
 
 from .utils import json
@@ -24,6 +25,7 @@ COMPRESS_TYPES = (
     'text/html',
     'text/plain',
     'text/xml',
+    'application/json',
     'application/xhtml+xml',
     'application/x-javascript',
     'application/javascript',
@@ -117,9 +119,8 @@ def save_url(url, filename, overwrite=False):
     if not os.path.exists(dirname):
         os.makedirs(dirname)
     data = read_url(url)
-    f = open(filename, 'w')
-    f.write(data)
-    f.close()
+    with open(filename, 'wb') as f:
+        f.write(data)
 
 def cache_path():
     return os.environ.get('oxCACHE', os.path.expanduser('~/.ox/cache'))
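Note: read_url returns bytes, so the rewrite opens the target in 'wb' and lets the with block close the handle. The same pattern in isolation (local data instead of a network fetch; the path is only an example):

    import os

    def save_bytes(data, filename):
        dirname = os.path.dirname(filename)
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        with open(filename, 'wb') as f:  # binary mode: no newline/codec mangling
            f.write(data)

    save_bytes(b'hello', '/tmp/ox-demo/hello.bin')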
@@ -203,7 +204,7 @@ class SQLiteCache(Cache):
             elif value == 'data':
                 if row[1] == 1:
                     r = zlib.decompress(r)
-                else:
+                elif PY2:
                     r = str(r)
                 break
 
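Note: on Python 2, sqlite3 hands BLOB columns back as buffer objects, so str() was the right coercion there; on Python 3 the bytes can pass through untouched. The branch above, extracted into a sketch (helper name is made up; r stands for the BLOB value):

    import zlib

    from six import PY2

    def normalize_blob(r, compressed):
        if compressed:
            return zlib.decompress(r)
        elif PY2:
            return str(r)  # buffer -> str on Python 2 only
        return r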
@@ -290,7 +291,7 @@ class FileCache(Cache):
             if value == 'headers':
                 r = info['headers']
             else:
-                with open(f) as data:
+                with open(f, 'rb') as data:
                     r = data.read()
                 if info['compressed']:
                     r = zlib.decompress(r)
@@ -321,9 +322,11 @@ class FileCache(Cache):
         if not info['only_headers']:
             if info['compressed']:
                 data = zlib.compress(data)
-            with open(f, 'w') as _f:
+            elif not isinstance(data, str):
+                data = data.encode('utf-8')
+            with open(f, 'wb') as _f:
                 _f.write(data)
-        with open(i, 'w') as _i:
+        with open(i, 'wb') as _i:
             json.dump(info, _i)
 
 if cache_path().startswith('fs:'):
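Note: with the cache files now opened in binary mode, text data has to become bytes before the write, either by compression or by encoding. A simplified sketch of that normalization (helper name is made up; the real branch above keys off the compressed flag in the cache metadata):

    import zlib

    def to_bytes(data, compress=False):
        if not isinstance(data, bytes):
            data = data.encode('utf-8')  # text -> UTF-8 bytes
        if compress:
            data = zlib.compress(data)   # zlib also requires bytes input
        return data

    with open('/tmp/ox-demo-cache.bin', 'wb') as f:
        f.write(to_bytes(u'cached page contents', compress=True))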
@@ -8,6 +8,8 @@ import mimetypes
 import random
 import sys
 
+from six import PY2
+
 
 __all__ = ['MultiPartForm']
 
@@ -36,19 +38,19 @@ class MultiPartForm(object):
 
     def add_field(self, name, value):
         """Add a simple field to the form data."""
-        if isinstance(name, unicode):
-            name = name.encode('utf-8')
-        if isinstance(value, unicode):
-            value = value.encode('utf-8')
+        if isinstance(name, bytes):
+            name = name.decode('utf-8')
+        if isinstance(value, bytes):
+            value = value.decode('utf-8')
         self.form_fields.append((name, value))
         return
 
     def add_file(self, fieldname, filename, fileHandle, mimetype=None):
         """Add a file to be uploaded."""
-        if isinstance(fieldname, unicode):
-            fieldname = fieldname.encode('utf-8')
-        if isinstance(filename, unicode):
-            filename = filename.encode('utf-8')
+        if isinstance(fieldname, bytes):
+            fieldname = fieldname.decode('utf-8')
+        if isinstance(filename, bytes):
+            filename = filename.decode('utf-8')
 
         if hasattr(fileHandle, 'read'):
             body = fileHandle.read()
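Note: add_field and add_file now normalize toward text (decoding bytes) instead of encoding to bytes, deferring all wire encoding to body(). The normalization on its own (helper name is made up):

    def ensure_text(value):
        if isinstance(value, bytes):
            value = value.decode('utf-8')
        return value

    print(ensure_text(b'title'))   # 'title'
    print(ensure_text(u'titre'))   # already text, returned untouched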
@@ -58,8 +60,14 @@ class MultiPartForm(object):
         mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
         self.files.append((fieldname, filename, mimetype, body))
         return
 
     def __str__(self):
+        body = self.body()
+        if not PY2:
+            body = body.decode('utf-8')
+        return body
+
+    def body(self):
         """Return a string representing the form data, including attached files."""
         # Build a list of lists, each containing "lines" of the
         # request. Each part is separated by a boundary string.
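Note: splitting __str__ from body() lets callers pick a representation: bytes for the wire, text for display. The shape of that split in miniature (the class is a stand-in, not the real form):

    from six import PY2

    class Payload(object):
        def body(self):
            return b'field=value'  # wire format: always bytes

        def __str__(self):
            body = self.body()
            if not PY2:
                body = body.decode('utf-8')  # str() must return text on Python 3
            return body

    print(str(Payload()))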
@@ -95,5 +103,6 @@ class MultiPartForm(object):
         flattened = list(itertools.chain(*parts))
         flattened.append('--' + self.boundary + '--')
         flattened.append('')
-        return '\r\n'.join(flattened)
+        flattened = [part if isinstance(part, bytes) else part.encode('utf-8') for part in flattened]
+        return b'\r\n'.join(flattened)
 
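Note: the flattened list can mix text parts (field headers) with raw bytes (file payloads), so everything is coerced to bytes before the final join. The coercion step alone, with fabricated parts:

    parts = [u'--boundary', u'Content-Disposition: form-data; name="f"',
             b'\x89PNG raw payload', u'--boundary--', u'']
    parts = [p if isinstance(p, bytes) else p.encode('utf-8') for p in parts]
    body = b'\r\n'.join(parts)
    print(type(body), len(body))  # uniform bytes, safe to use as a POST body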
@@ -5,7 +5,7 @@ from __future__ import with_statement, print_function
 import os
 import gzip
 import re
-from six import BytesIO
+from six import BytesIO, PY3
 import struct
 from six.moves import urllib
 
@@ -51,6 +51,8 @@ def open_url(url, data=None, headers=DEFAULT_HEADERS):
     if isinstance(url, bytes):
         url = url.decode('utf-8')
     url = url.replace(' ', '%20')
+    if data and PY3 and not isinstance(data, bytes):
+        data = data.encode('utf-8')
     req = urllib.request.Request(url, data, headers)
     return urllib.request.urlopen(req)
 
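Note: urlopen on Python 3 rejects str bodies, hence the new guard. A standalone sketch (the URL is only an example; the network call is left commented out):

    from six import PY3
    from six.moves import urllib

    data = 'key=value'
    if data and PY3 and not isinstance(data, bytes):
        data = data.encode('utf-8')  # Python 3 urlopen requires a bytes body
    req = urllib.request.Request('http://httpbin.org/post', data)
    # urllib.request.urlopen(req) now works on both Python 2 and 3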
@@ -104,9 +106,8 @@ def save_url(url, filename, overwrite=False):
     if not os.path.exists(dirname):
         os.makedirs(dirname)
     data = read_url(url)
-    f = open(filename, 'w')
-    f.write(data)
-    f.close()
+    with open(filename, 'wb') as f:
+        f.write(data)
 
 def oshash(url):
     def get_size(url):
@@ -9,7 +9,6 @@ import unicodedata
 from six.moves import urllib
 from six import string_types
 
-
 from .. import find_re, strip_tags, decode_html
 from .. import cache
 
@@ -2,7 +2,7 @@
 # encoding: utf-8
 from __future__ import print_function
 import re
-import urllib
+from six.moves import urllib
 
 from ox.cache import read_url
 from ox.html import decode_html, strip_tags
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
 import re
-from urllib import quote
+
+from six.moves.urllib.parse import quote
 from lxml.html import document_fromstring
 
 from ox.cache import read_url
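Note: several scraper modules make the same swap; quote moved to urllib.parse in Python 3, and six.moves papers over the difference:

    from six.moves.urllib.parse import quote

    # resolves to urllib.quote on Python 2, urllib.parse.quote on Python 3
    print(quote(u'sight & sound'))  # -> sight%20%26%20sound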
@@ -3,7 +3,7 @@
 from datetime import datetime
 import re
 import socket
-from urllib import quote
+from six.moves.urllib.parse import quote
 
 from ox.cache import read_url
 from ox import find_re, cache, strip_tags, decode_html, get_torrent_info, int_value, normalize_newlines
@@ -2,9 +2,8 @@
 # vi:si:et:sw=4:sts=4:ts=4
 from datetime import datetime
 import re
-import socket
-from urllib import quote, urlencode
-from urllib2 import URLError
+
+from six.moves.urllib.parse import quote
 
 from ox import find_re, cache, strip_tags, decode_html, get_torrent_info, normalize_newlines
 from ox.normalize import normalize_imdbid
@@ -2,7 +2,7 @@
 # vi:si:et:sw=4:sts=4:ts=4
 import re
 from datetime import datetime
-from urllib import quote
+from six.moves.urllib.parse import quote
 
 import lxml.html
 import ox
@@ -1,27 +1,17 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
-import re
-from StringIO import StringIO
-import xml.etree.ElementTree as ET
+import json
 
 from ox.cache import read_url
-from ox import find_string, find_re
 
 
 def get_data(id):
-    url = 'http://www.vimeo.com/moogaloop/load/clip:%s' %id
-    xml = read_url(url)
-    tree = ET.parse(StringIO(xml))
-    request_signature = tree.find('request_signature').text
-    request_signature_expires = tree.find('request_signature_expires').text
-
-    data = {}
-    video_url = "http://www.vimeo.com/moogaloop/play/clip:%s/%s/%s/?q=" % \
-        (id, request_signature, request_signature_expires)
-    data['video_sd'] = video_url + 'sd'
-    data['video_hd'] = video_url + 'hd'
-    video = tree.find('video')
-    for key in ('caption', 'width', 'height', 'duration', 'thumbnail'):
-        data[key] = video.find(key).text
+    url = 'http://vimeo.com/api/v2/video/%s.json' % id
+    data = json.loads(read_url(url).decode('utf-8'))[0]
+    url = 'http://player.vimeo.com/video/%s/config?autoplay=0&byline=0&bypass_privacy=1&context=clip.main&default_to_hd=1&portrait=0' % id
+    info = json.loads(read_url(url).decode('utf-8'))
+    data['video'] = info['request']['files']['h264']
     return data
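Note: the scraped moogaloop XML endpoint is replaced with two JSON calls: the v2 API for clip metadata and the player config for stream URLs. Hypothetical usage (the clip id is made up, and both endpoints may have changed since this commit):

    data = get_data('76979871')
    print(data['title'])   # metadata from the v2 JSON API
    print(data['video'])   # h264 stream info from the player config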