rename to local functions in cache, remove capfirst, that's 'foo'.capitalize(), move tests to doctests, add test.sh

This commit is contained in:
j 2008-07-06 11:42:41 +02:00
parent da35e9bc32
commit f449440838
4 changed files with 41 additions and 33 deletions

View file

@ -16,9 +16,9 @@ import net
from net import DEFAULT_HEADERS, getEncoding
cache_timeout = 30*24*60*60 # default is 30 days
_cache_timeout = 30*24*60*60 # default is 30 days
def status(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
def status(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
'''
>>> status('http://google.com')
200
@ -28,7 +28,7 @@ def status(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
headers = getHeaders(url, data, headers)
return int(headers['status'])
def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
'''
>>> exists('http://google.com')
True
@ -40,19 +40,19 @@ def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
return True
return False
def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
url_cache_file = "%s.headers" % getUrlCacheFile(url, data, headers)
url_headers = loadUrlCache(url_cache_file, timeout)
def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
url_cache_file = "%s.headers" % _getUrlCacheFile(url, data, headers)
url_headers = _loadUrlCache(url_cache_file, timeout)
if url_headers:
url_headers = simplejson.loads(url_headers)
else:
url_headers = net.getHeaders(url, data, headers)
saveUrlHeaders(url_cache_file, url_headers)
_saveUrlHeaders(url_cache_file, url_headers)
return url_headers
def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
url_cache_file = getUrlCacheFile(url, data, headers)
result = loadUrlCache(url_cache_file, timeout)
def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
url_cache_file = _getUrlCacheFile(url, data, headers)
result = _loadUrlCache(url_cache_file, timeout)
if not result:
try:
url_headers, result = net.getUrl(url, data, headers, returnHeaders=True)
@ -62,29 +62,29 @@ def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
result = e.read()
if url_headers.get('content-encoding', None) == 'gzip':
result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
saveUrlCache(url_cache_file, result, url_headers)
_saveUrlCache(url_cache_file, result, url_headers)
return result
def getUrlUnicode(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout, _getUrl=getUrl):
def getUrlUnicode(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout, _getUrl=getUrl):
data = _getUrl(url, data, headers, timeout)
encoding = getEncoding(data)
if not encoding:
encoding = 'latin-1'
return unicode(data, encoding)
def getCacheBase():
def _getCacheBase():
'cache base is eather ~/.ox/cache or can set via env variable oxCACHE'
return os.environ.get('oxCACHE', os.path.expanduser('~/.ox/cache'))
def getUrlCacheFile(url, data=None, headers=DEFAULT_HEADERS):
def _getUrlCacheFile(url, data=None, headers=DEFAULT_HEADERS):
if data:
url_hash = sha.sha(url + '?' + data).hexdigest()
else:
url_hash = sha.sha(url).hexdigest()
domain = ".".join(urlparse.urlparse(url)[1].split('.')[-2:])
return os.path.join(getCacheBase(), domain, url_hash[:2], url_hash[2:4], url_hash[4:6], url_hash)
return os.path.join(_getCacheBase(), domain, url_hash[:2], url_hash[2:4], url_hash[4:6], url_hash)
def loadUrlCache(url_cache_file, timeout=cache_timeout):
def _loadUrlCache(url_cache_file, timeout=_cache_timeout):
if timeout == 0:
return None
if os.path.exists(url_cache_file):
@ -98,16 +98,16 @@ def loadUrlCache(url_cache_file, timeout=cache_timeout):
return data
return None
def saveUrlCache(url_cache_file, data, headers):
def _saveUrlCache(url_cache_file, data, headers):
folder = os.path.dirname(url_cache_file)
if not os.path.exists(folder):
os.makedirs(folder)
f = open(url_cache_file, 'w')
f.write(data)
f.close()
saveUrlHeaders("%s.headers" % url_cache_file, headers)
_saveUrlHeaders("%s.headers" % url_cache_file, headers)
def saveUrlHeaders(url_cache_file, headers):
def _saveUrlHeaders(url_cache_file, headers):
folder = os.path.dirname(url_cache_file)
if not os.path.exists(folder):
os.makedirs(folder)

View file

@ -32,30 +32,40 @@ def to36(q):
def from36(q):
return int(q, 36)
def intValue(strValue, default=''):
def intValue(strValue, default=u''):
"""
>>> intValue('abc23')
u'23'
>>> intValue(' abc23')
u'23'
>>> intValue('ab')
u''
"""
try:
val = re.compile('(\d+)').findall(unicode(strValue).strip())[0]
except:
val = default
return val
def test_intValue():
assert intValue('abc23') == '23'
assert intValue(' abc23') == '23'
assert intValue(' abc') == ''
def floatValue(strValue, default=u''):
"""
>>> floatValue('abc23.4')
u'23.4'
def floatValue(strValue, default=''):
>>> floatValue(' abc23.4')
u'23.4'
>>> floatValue('ab')
u''
"""
try:
val = re.compile('([\d.]+)').findall(unicode(strValue).strip())[0]
except:
val = default
return val
def test_floatValue():
assert floatValue('abc23.4') == '23.4'
assert floatValue(' abc23.4') == '23.4'
assert floatValue(' abc') == ''
def formatNumber(number, longName, shortName):
"""
Return the number in a human-readable format (23 KB, 23.4 MB, 23.42 GB)

View file

@ -35,9 +35,6 @@ def findString(string, string0='', string1 = ''):
string1 = '$'
return findRe(string, string0 + '(.*?)' + string1)
# Capitalizes the first letter of a string.
capfirst = lambda x: x and x[0].upper() + x[1:]
def removeSpecialCharacters(text):
"""
Removes special characters inserted by Word.

1
test.sh Executable file
View file

@ -0,0 +1 @@
nosetests --with-doctest oxlib