_cache_timeout is public
commit 5204e27488
parent d6d408e117
1 changed file with 7 additions and 7 deletions
@@ -16,9 +16,9 @@ import net
 from net import DEFAULT_HEADERS, getEncoding
 
 
-_cache_timeout = 30*24*60*60 # default is 30 days
+cache_timeout = 30*24*60*60 # default is 30 days
 
-def status(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+def status(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     '''
     >>> status('http://google.com')
     200
@@ -28,7 +28,7 @@ def status(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
     headers = getHeaders(url, data, headers)
     return int(headers['status'])
 
-def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     '''
     >>> exists('http://google.com')
     True
@@ -40,7 +40,7 @@ def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
         return True
     return False
 
-def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     url_cache_file = "%s.headers" % _getUrlCacheFile(url, data, headers)
     url_headers = _loadUrlCache(url_cache_file, timeout)
     if url_headers:
@@ -50,7 +50,7 @@ def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
         _saveUrlHeaders(url_cache_file, url_headers)
     return url_headers
 
-def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     url_cache_file = _getUrlCacheFile(url, data, headers)
     result = _loadUrlCache(url_cache_file, timeout)
     if not result:
@@ -65,7 +65,7 @@ def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
         _saveUrlCache(url_cache_file, result, url_headers)
     return result
 
-def getUrlUnicode(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout, _getUrl=getUrl):
+def getUrlUnicode(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout, _getUrl=getUrl):
     data = _getUrl(url, data, headers, timeout)
     encoding = getEncoding(data)
     if not encoding:
@@ -84,7 +84,7 @@ def _getUrlCacheFile(url, data=None, headers=DEFAULT_HEADERS):
     domain = ".".join(urlparse.urlparse(url)[1].split('.')[-2:])
     return os.path.join(_getCacheBase(), domain, url_hash[:2], url_hash[2:4], url_hash[4:6], url_hash)
 
-def _loadUrlCache(url_cache_file, timeout=_cache_timeout):
+def _loadUrlCache(url_cache_file, timeout=cache_timeout):
     if timeout == 0:
         return None
     if os.path.exists(url_cache_file):
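A minimal usage sketch of what the rename enables. Only the function names, the cache_timeout global, and the "timeout == 0 means no cache" behavior are confirmed by the diff above; the module name "cache" and the example URLs are assumptions for illustration.

import cache

# timeout=0 forces a fresh fetch: the diff shows _loadUrlCache returning
# None when timeout == 0, so the cache is never consulted.
fresh = cache.getUrl('http://google.com', timeout=0)

# Accept cached copies up to one hour old for this single call.
recent = cache.getUrl('http://google.com', timeout=60*60)

# The now-public default can be read and passed along explicitly.
data = cache.getUrlUnicode('http://google.com', timeout=cache.cache_timeout)

One caveat: rebinding cache.cache_timeout at runtime does not change the defaults of functions already defined with timeout=cache_timeout, because Python evaluates default argument values once, at definition time. The rename mainly makes the value readable and reusable by callers.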
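The context in the hunk at -84 also shows how cache files are sharded on disk: cache base, then the registrable domain, then three two-character slices of the URL hash, then the full hash. A sketch of the resulting layout, assuming url_hash is an md5-style hex digest (the hashing function itself is outside this diff, and '/tmp/cache' stands in for _getCacheBase()):

import hashlib
import os

url = 'http://google.com/'
url_hash = hashlib.md5(url.encode()).hexdigest()  # assumption: md5 hex digest
domain = 'google.com'                             # last two labels of the host
path = os.path.join('/tmp/cache', domain,
                    url_hash[:2], url_hash[2:4], url_hash[4:6], url_hash)
print(path)  # /tmp/cache/google.com/<h0h1>/<h2h3>/<h4h5>/<full hash>

Fanning the hash out over three two-character directory levels keeps any single directory from accumulating thousands of entries, which matters on filesystems that scan directories linearly.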