diff --git a/ox/cache.py b/ox/cache.py
index b08e8d2..5d2aa61 100644
--- a/ox/cache.py
+++ b/ox/cache.py
@@ -13,6 +13,11 @@ import zlib
 from six import BytesIO
 from six.moves import urllib
 from six import PY2
+try:
+    import requests
+    USE_REQUESTS = True
+except:
+    USE_REQUESTS = False
 
 from .utils import json
 from .file import makedirs
@@ -94,19 +99,30 @@ def read_url(url, data=None, headers=None, timeout=cache_timeout, valid=None, un
     result = store.get(url, data, headers, timeout)
     url_headers = {}
     if not result:
-        try:
-            url_headers, result = net.read_url(url, data, headers, return_headers=True)
-        except urllib.error.HTTPError as e:
-            e.headers['Status'] = "%s" % e.code
-            for key in e.headers:
-                url_headers[key.lower()] = e.headers[key]
-            result = e.read()
-            if url_headers.get('content-encoding', None) == 'gzip':
-                result = gzip.GzipFile(fileobj=BytesIO(result)).read()
-        if not valid or valid(result, url_headers):
-            store.set(url, post_data=data, data=result, headers=url_headers)
+        if USE_REQUESTS:
+            r = requests.get(url, headers=headers)
+            for key in r.headers:
+                url_headers[key.lower()] = r.headers[key]
+            result = r.content
+            url_headers['Status'] = "%s" % r.status_code
+            if not valid or valid(result, url_headers):
+                store.set(url, post_data=data, data=result, headers=url_headers)
+            else:
+                raise InvalidResult(result, url_headers)
         else:
-            raise InvalidResult(result, url_headers)
+            try:
+                url_headers, result = net.read_url(url, data, headers, return_headers=True)
+            except urllib.error.HTTPError as e:
+                e.headers['Status'] = "%s" % e.code
+                for key in e.headers:
+                    url_headers[key.lower()] = e.headers[key]
+                result = e.read()
+                if url_headers.get('content-encoding', None) == 'gzip':
+                    result = gzip.GzipFile(fileobj=BytesIO(result)).read()
+            if not valid or valid(result, url_headers):
+                store.set(url, post_data=data, data=result, headers=url_headers)
+            else:
+                raise InvalidResult(result, url_headers)
     if unicode:
         ctype = url_headers.get('content-type', '').lower()
         if 'charset' in ctype:
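
Not part of the patch: a minimal usage sketch of the code path this diff changes, assuming python-ox is importable and that read_url keeps the signature shown in the hunk above (url, data, headers, timeout, valid, unicode). The URL and the status-checking callback are illustrative; the attributes of InvalidResult are assumed, since the diff only shows its constructor call.

    # Illustrative sketch, not part of the patch. With requests installed the
    # USE_REQUESTS branch above is taken; otherwise the urllib-based
    # net.read_url fallback is used.
    from ox.cache import read_url, InvalidResult

    def only_ok(result, headers):
        # The requests branch stores str(r.status_code) under 'Status';
        # on the urllib path 'Status' is only set for HTTP errors.
        return headers.get('Status', '200') == '200'

    try:
        body = read_url('https://example.com/', valid=only_ok)
        print(len(body), 'bytes fetched (or served from cache)')
    except InvalidResult as e:
        # Attribute name assumed from the constructor arguments above.
        print('rejected by valid():', getattr(e, 'headers', {}))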