diff --git a/Shared/lib/python3.4/site-packages/ox/__init__.py b/Shared/lib/python3.4/site-packages/ox/__init__.py index 18a2a91..ce09413 100644 --- a/Shared/lib/python3.4/site-packages/ox/__init__.py +++ b/Shared/lib/python3.4/site-packages/ox/__init__.py @@ -1,7 +1,11 @@ # -*- coding: utf-8 -*- # vi:si:et:sw=4:sts=4:ts=4 # GPL 2011 -__version__ = '2.1.1' +try: + from . import __version + __version__ = __version.VERSION +except: + __version__ = '2.1.x' from . import cache from . import js diff --git a/Shared/lib/python3.4/site-packages/ox/__version.py b/Shared/lib/python3.4/site-packages/ox/__version.py new file mode 100644 index 0000000..0c52457 --- /dev/null +++ b/Shared/lib/python3.4/site-packages/ox/__version.py @@ -0,0 +1 @@ +VERSION="2.1.670" \ No newline at end of file diff --git a/Shared/lib/python3.4/site-packages/ox/api.py b/Shared/lib/python3.4/site-packages/ox/api.py index 2f6d3e5..3b57ae2 100644 --- a/Shared/lib/python3.4/site-packages/ox/api.py +++ b/Shared/lib/python3.4/site-packages/ox/api.py @@ -5,7 +5,7 @@ from __future__ import with_statement from six.moves import http_cookiejar as cookielib import gzip -from six import StringIO +from six import BytesIO, PY2 from six.moves import urllib from types import MethodType @@ -30,7 +30,7 @@ class API(object): else: self._cj = cookielib.CookieJar() self._opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self._cj), - urllib.HTTPHandler(debuglevel=self.debuglevel)) + urllib.request.HTTPHandler(debuglevel=self.debuglevel)) self._opener.addheaders = [ ('User-Agent', '%s/%s' % (self.__name__, self.__version__)) ] @@ -45,7 +45,10 @@ class API(object): def _add_method(self, method, name): if name is None: name = method.func_name - setattr(self, name, MethodType(method, self, type(self))) + if PY2: + setattr(self, name, MethodType(method, self, type(self))) + else: + setattr(self, name, MethodType(method, self)) def _add_action(self, action): def method(self, *args, **kw): @@ -57,22 +60,30 
@@ class API(object): return self._request(action, kw) if 'doc' in self._properties[action]: method.__doc__ = self._properties[action]['doc'] - method.func_name = str(action) + if PY2: + method.func_name = str(action) + else: + method.func_name = action self._add_method(method, action) def _json_request(self, url, form): result = {} try: - body = str(form) - request = urllib.reuqest.Request(str(url)) - request.add_header('Content-type', form.get_content_type()) + body = form.body() + if PY2: + if not isinstance(url, bytes): + url = url.encode('utf-8') + request = urllib.request.Request(url) + request.add_data(body) + else: + request = urllib.request.Request(url, data=body, method='POST') + request.add_header('Content-Type', form.get_content_type()) request.add_header('Content-Length', str(len(body))) request.add_header('Accept-Encoding', 'gzip, deflate') - request.add_data(body) f = self._opener.open(request) result = f.read() if f.headers.get('content-encoding', None) == 'gzip': - result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read() + result = gzip.GzipFile(fileobj=BytesIO(result)).read() result = result.decode('utf-8') return json.loads(result) except urllib.error.HTTPError as e: diff --git a/Shared/lib/python3.4/site-packages/ox/cache.py b/Shared/lib/python3.4/site-packages/ox/cache.py index 5b92452..123ec96 100644 --- a/Shared/lib/python3.4/site-packages/ox/cache.py +++ b/Shared/lib/python3.4/site-packages/ox/cache.py @@ -10,6 +10,7 @@ import os from six import BytesIO import time from six.moves import urllib +from six import PY2 import sqlite3 from .utils import json @@ -24,6 +25,7 @@ COMPRESS_TYPES = ( 'text/html', 'text/plain', 'text/xml', + 'application/json', 'application/xhtml+xml', 'application/x-javascript', 'application/javascript', @@ -117,9 +119,8 @@ def save_url(url, filename, overwrite=False): if not os.path.exists(dirname): os.makedirs(dirname) data = read_url(url) - f = open(filename, 'w') - f.write(data) - f.close() + with 
open(filename, 'wb') as f: + f.write(data) def cache_path(): return os.environ.get('oxCACHE', os.path.expanduser('~/.ox/cache')) @@ -203,7 +204,7 @@ class SQLiteCache(Cache): elif value == 'data': if row[1] == 1: r = zlib.decompress(r) - else: + elif PY2: r = str(r) break @@ -290,7 +291,7 @@ class FileCache(Cache): if value == 'headers': r = info['headers'] else: - with open(f) as data: + with open(f, 'rb') as data: r = data.read() if info['compressed']: r = zlib.decompress(r) @@ -321,9 +322,11 @@ class FileCache(Cache): if not info['only_headers']: if info['compressed']: data = zlib.compress(data) - with open(f, 'w') as _f: + elif not isinstance(data, bytes): + data = data.encode('utf-8') + with open(f, 'wb') as _f: _f.write(data) - with open(i, 'w') as _i: + with open(i, 'w') as _i: json.dump(info, _i) if cache_path().startswith('fs:'): diff --git a/Shared/lib/python3.4/site-packages/ox/form.py b/Shared/lib/python3.4/site-packages/ox/form.py index 10d17ab..8969e03 100644 --- a/Shared/lib/python3.4/site-packages/ox/form.py +++ b/Shared/lib/python3.4/site-packages/ox/form.py @@ -8,6 +8,8 @@ import mimetypes import random import sys +from six import PY2 + __all__ = ['MultiPartForm'] @@ -36,19 +38,19 @@ class MultiPartForm(object): def add_field(self, name, value): """Add a simple field to the form data.""" - if isinstance(name, unicode): - name = name.encode('utf-8') - if isinstance(value, unicode): - value = value.encode('utf-8') + if isinstance(name, bytes): + name = name.decode('utf-8') + if isinstance(value, bytes): + value = value.decode('utf-8') self.form_fields.append((name, value)) return def add_file(self, fieldname, filename, fileHandle, mimetype=None): """Add a file to be uploaded.""" - if isinstance(fieldname, unicode): - fieldname = fieldname.encode('utf-8') - if isinstance(filename, unicode): - filename = filename.encode('utf-8') + if isinstance(fieldname, bytes): + fieldname = fieldname.decode('utf-8') + if isinstance(filename, bytes): + filename = 
filename.decode('utf-8') if hasattr(fileHandle, 'read'): body = fileHandle.read() @@ -58,8 +60,14 @@ class MultiPartForm(object): mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream' self.files.append((fieldname, filename, mimetype, body)) return - + def __str__(self): + body = self.body() + if not PY2: + body = body.decode('utf-8') + return body + + def body(self): """Return a string representing the form data, including attached files.""" # Build a list of lists, each containing "lines" of the # request. Each part is separated by a boundary string. @@ -95,5 +103,6 @@ class MultiPartForm(object): flattened = list(itertools.chain(*parts)) flattened.append('--' + self.boundary + '--') flattened.append('') - return '\r\n'.join(flattened) + flattened = [part if isinstance(part, bytes) else part.encode('utf-8') for part in flattened] + return b'\r\n'.join(flattened) diff --git a/Shared/lib/python3.4/site-packages/ox/net.py b/Shared/lib/python3.4/site-packages/ox/net.py index a598d2f..b4a470b 100644 --- a/Shared/lib/python3.4/site-packages/ox/net.py +++ b/Shared/lib/python3.4/site-packages/ox/net.py @@ -5,7 +5,7 @@ from __future__ import with_statement, print_function import os import gzip import re -from six import BytesIO +from six import BytesIO, PY3 import struct from six.moves import urllib @@ -51,6 +51,8 @@ def open_url(url, data=None, headers=DEFAULT_HEADERS): if isinstance(url, bytes): url = url.decode('utf-8') url = url.replace(' ', '%20') + if data and PY3 and not isinstance(data, bytes): + data = data.encode('utf-8') req = urllib.request.Request(url, data, headers) return urllib.request.urlopen(req) @@ -104,9 +106,8 @@ def save_url(url, filename, overwrite=False): if not os.path.exists(dirname): os.makedirs(dirname) data = read_url(url) - f = open(filename, 'w') - f.write(data) - f.close() + with open(filename, 'wb') as f: + f.write(data) def oshash(url): def get_size(url): diff --git a/Shared/lib/python3.4/site-packages/ox/web/imdb.py 
b/Shared/lib/python3.4/site-packages/ox/web/imdb.py index af65154..6103fdb 100644 --- a/Shared/lib/python3.4/site-packages/ox/web/imdb.py +++ b/Shared/lib/python3.4/site-packages/ox/web/imdb.py @@ -9,7 +9,6 @@ import unicodedata from six.moves import urllib from six import string_types - from .. import find_re, strip_tags, decode_html from .. import cache diff --git a/Shared/lib/python3.4/site-packages/ox/web/itunes.py b/Shared/lib/python3.4/site-packages/ox/web/itunes.py index 9d775a1..886ff08 100644 --- a/Shared/lib/python3.4/site-packages/ox/web/itunes.py +++ b/Shared/lib/python3.4/site-packages/ox/web/itunes.py @@ -2,7 +2,7 @@ # encoding: utf-8 from __future__ import print_function import re -import urllib +from six.moves import urllib from ox.cache import read_url from ox.html import decode_html, strip_tags diff --git a/Shared/lib/python3.4/site-packages/ox/web/metacritic.py b/Shared/lib/python3.4/site-packages/ox/web/metacritic.py index e59504a..0e43c80 100644 --- a/Shared/lib/python3.4/site-packages/ox/web/metacritic.py +++ b/Shared/lib/python3.4/site-packages/ox/web/metacritic.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- # vi:si:et:sw=4:sts=4:ts=4 import re -from urllib import quote + +from six.moves.urllib.parse import quote from lxml.html import document_fromstring from ox.cache import read_url diff --git a/Shared/lib/python3.4/site-packages/ox/web/mininova.py b/Shared/lib/python3.4/site-packages/ox/web/mininova.py index eb78cab..799390c 100644 --- a/Shared/lib/python3.4/site-packages/ox/web/mininova.py +++ b/Shared/lib/python3.4/site-packages/ox/web/mininova.py @@ -3,7 +3,7 @@ from datetime import datetime import re import socket -from urllib import quote +from six.moves.urllib.parse import quote from ox.cache import read_url from ox import find_re, cache, strip_tags, decode_html, get_torrent_info, int_value, normalize_newlines diff --git a/Shared/lib/python3.4/site-packages/ox/web/thepiratebay.py b/Shared/lib/python3.4/site-packages/ox/web/thepiratebay.py 
index b751384..7002ebc 100644 --- a/Shared/lib/python3.4/site-packages/ox/web/thepiratebay.py +++ b/Shared/lib/python3.4/site-packages/ox/web/thepiratebay.py @@ -2,9 +2,8 @@ # vi:si:et:sw=4:sts=4:ts=4 from datetime import datetime import re -import socket -from urllib import quote, urlencode -from urllib2 import URLError + +from six.moves.urllib.parse import quote from ox import find_re, cache, strip_tags, decode_html, get_torrent_info, normalize_newlines from ox.normalize import normalize_imdbid diff --git a/Shared/lib/python3.4/site-packages/ox/web/twitter.py b/Shared/lib/python3.4/site-packages/ox/web/twitter.py index 45eff00..fa33bfc 100644 --- a/Shared/lib/python3.4/site-packages/ox/web/twitter.py +++ b/Shared/lib/python3.4/site-packages/ox/web/twitter.py @@ -2,7 +2,7 @@ # vi:si:et:sw=4:sts=4:ts=4 import re from datetime import datetime -from urllib import quote +from six.moves.urllib.parse import quote import lxml.html import ox diff --git a/Shared/lib/python3.4/site-packages/ox/web/vimeo.py b/Shared/lib/python3.4/site-packages/ox/web/vimeo.py index f51216f..70783ca 100644 --- a/Shared/lib/python3.4/site-packages/ox/web/vimeo.py +++ b/Shared/lib/python3.4/site-packages/ox/web/vimeo.py @@ -1,27 +1,17 @@ # -*- coding: utf-8 -*- # vi:si:et:sw=4:sts=4:ts=4 -import re -from StringIO import StringIO -import xml.etree.ElementTree as ET + +import json from ox.cache import read_url -from ox import find_string, find_re def get_data(id): - url = 'http://www.vimeo.com/moogaloop/load/clip:%s' %id - xml = read_url(url) - tree = ET.parse(StringIO(xml)) - request_signature = tree.find('request_signature').text - request_signature_expires = tree.find('request_signature_expires').text - - data = {} - video_url = "http://www.vimeo.com/moogaloop/play/clip:%s/%s/%s/?q=" % \ - (id, request_signature, request_signature_expires) - data['video_sd'] = video_url + 'sd' - data['video_hd'] = video_url + 'hd' - video = tree.find('video') - for key in ('caption', 'width', 'height', 
'duration', 'thumbnail'): - data[key] = video.find(key).text + url = 'http://vimeo.com/api/v2/video/%s.json' % id + data = json.loads(read_url(url).decode('utf-8'))[0] + + url = 'http://player.vimeo.com/video/%s/config?autoplay=0&byline=0&bypass_privacy=1&context=clip.main&default_to_hd=1&portrait=0' % id + info = json.loads(read_url(url).decode('utf-8')) + data['video'] = info['request']['files']['h264'] return data