rename to local functions in cache, remove capfirst (that's 'foo'.capitalize()), move tests to doctests, add test.sh

author: j
date:   2008-07-06 11:42:41 +02:00
parent: da35e9bc32
commit: f449440838

4 changed files with 41 additions and 33 deletions


@@ -16,9 +16,9 @@ import net
 from net import DEFAULT_HEADERS, getEncoding

-cache_timeout = 30*24*60*60 # default is 30 days
+_cache_timeout = 30*24*60*60 # default is 30 days

-def status(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
+def status(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
     '''
     >>> status('http://google.com')
     200
@@ -28,7 +28,7 @@ def status(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     headers = getHeaders(url, data, headers)
     return int(headers['status'])

-def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
+def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
     '''
     >>> exists('http://google.com')
     True
@@ -40,19 +40,19 @@ def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
         return True
     return False

-def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
-    url_cache_file = "%s.headers" % getUrlCacheFile(url, data, headers)
-    url_headers = loadUrlCache(url_cache_file, timeout)
+def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+    url_cache_file = "%s.headers" % _getUrlCacheFile(url, data, headers)
+    url_headers = _loadUrlCache(url_cache_file, timeout)
     if url_headers:
         url_headers = simplejson.loads(url_headers)
     else:
         url_headers = net.getHeaders(url, data, headers)
-        saveUrlHeaders(url_cache_file, url_headers)
+        _saveUrlHeaders(url_cache_file, url_headers)
     return url_headers

-def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
-    url_cache_file = getUrlCacheFile(url, data, headers)
-    result = loadUrlCache(url_cache_file, timeout)
+def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout):
+    url_cache_file = _getUrlCacheFile(url, data, headers)
+    result = _loadUrlCache(url_cache_file, timeout)
     if not result:
         try:
             url_headers, result = net.getUrl(url, data, headers, returnHeaders=True)
@@ -62,29 +62,29 @@ def getUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
             result = e.read()
         if url_headers.get('content-encoding', None) == 'gzip':
             result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
-        saveUrlCache(url_cache_file, result, url_headers)
+        _saveUrlCache(url_cache_file, result, url_headers)
     return result

-def getUrlUnicode(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout, _getUrl=getUrl):
+def getUrlUnicode(url, data=None, headers=DEFAULT_HEADERS, timeout=_cache_timeout, _getUrl=getUrl):
     data = _getUrl(url, data, headers, timeout)
     encoding = getEncoding(data)
     if not encoding:
         encoding = 'latin-1'
     return unicode(data, encoding)

-def getCacheBase():
+def _getCacheBase():
     'cache base is either ~/.ox/cache or can be set via env variable oxCACHE'
     return os.environ.get('oxCACHE', os.path.expanduser('~/.ox/cache'))

-def getUrlCacheFile(url, data=None, headers=DEFAULT_HEADERS):
+def _getUrlCacheFile(url, data=None, headers=DEFAULT_HEADERS):
     if data:
         url_hash = sha.sha(url + '?' + data).hexdigest()
     else:
         url_hash = sha.sha(url).hexdigest()
     domain = ".".join(urlparse.urlparse(url)[1].split('.')[-2:])
-    return os.path.join(getCacheBase(), domain, url_hash[:2], url_hash[2:4], url_hash[4:6], url_hash)
+    return os.path.join(_getCacheBase(), domain, url_hash[:2], url_hash[2:4], url_hash[4:6], url_hash)

-def loadUrlCache(url_cache_file, timeout=cache_timeout):
+def _loadUrlCache(url_cache_file, timeout=_cache_timeout):
     if timeout == 0:
         return None
     if os.path.exists(url_cache_file):
@@ -98,16 +98,16 @@ def loadUrlCache(url_cache_file, timeout=cache_timeout):
             return data
     return None

-def saveUrlCache(url_cache_file, data, headers):
+def _saveUrlCache(url_cache_file, data, headers):
     folder = os.path.dirname(url_cache_file)
     if not os.path.exists(folder):
         os.makedirs(folder)
     f = open(url_cache_file, 'w')
     f.write(data)
     f.close()
-    saveUrlHeaders("%s.headers" % url_cache_file, headers)
+    _saveUrlHeaders("%s.headers" % url_cache_file, headers)

-def saveUrlHeaders(url_cache_file, headers):
+def _saveUrlHeaders(url_cache_file, headers):
     folder = os.path.dirname(url_cache_file)
     if not os.path.exists(folder):
         os.makedirs(folder)
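The rename follows Python's convention that a leading underscore marks a name as module-private: "from module import *" skips such names when no __all__ is defined, so after this commit only the public entry points (status, exists, getHeaders, getUrl, getUrlUnicode) are exported by default. A minimal sketch of the effect, using a hypothetical module rather than the code above:

# mymod.py -- hypothetical module illustrating the underscore convention
_timeout = 60                      # module-private constant

def fetch(url, timeout=_timeout):  # public: picked up by "from mymod import *"
    return _load(url)

def _load(url):                    # private: skipped by "from mymod import *"
    return 'cached: ' + url

# >>> from mymod import *
# >>> fetch('http://example.com')
# 'cached: http://example.com'
# >>> _load('http://example.com')
# NameError: name '_load' is not defined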


@@ -32,30 +32,40 @@ def to36(q):
 def from36(q):
     return int(q, 36)

-def intValue(strValue, default=''):
+def intValue(strValue, default=u''):
+    """
+    >>> intValue('abc23')
+    u'23'
+
+    >>> intValue(' abc23')
+    u'23'
+
+    >>> intValue('ab')
+    u''
+    """
     try:
         val = re.compile('(\d+)').findall(unicode(strValue).strip())[0]
     except:
         val = default
     return val

-def test_intValue():
-    assert intValue('abc23') == '23'
-    assert intValue(' abc23') == '23'
-    assert intValue(' abc') == ''
-
-def floatValue(strValue, default=''):
+def floatValue(strValue, default=u''):
+    """
+    >>> floatValue('abc23.4')
+    u'23.4'
+
+    >>> floatValue(' abc23.4')
+    u'23.4'
+
+    >>> floatValue('ab')
+    u''
+    """
     try:
         val = re.compile('([\d.]+)').findall(unicode(strValue).strip())[0]
     except:
         val = default
     return val

-def test_floatValue():
-    assert floatValue('abc23.4') == '23.4'
-    assert floatValue(' abc23.4') == '23.4'
-    assert floatValue(' abc') == ''
-
 def formatNumber(number, longName, shortName):
     """
     Return the number in a human-readable format (23 KB, 23.4 MB, 23.42 GB)
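Moving the assert-based test_ functions into docstrings means each example now doubles as documentation and as a test. The examples can also be run without nose; a sketch, assuming the module is importable as oxlib.format (the package name is taken from test.sh below, the module name is inferred, not shown in this diff):

import doctest
import oxlib.format

# Finds every '>>>' example in the module's docstrings, executes it,
# and compares the result against the expected output written below it.
failed, attempted = doctest.testmod(oxlib.format)
print '%s of %s examples failed' % (failed, attempted)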


@@ -35,9 +35,6 @@ def findString(string, string0='', string1 = ''):
         string1 = '$'
     return findRe(string, string0 + '(.*?)' + string1)

-# Capitalizes the first letter of a string.
-capfirst = lambda x: x and x[0].upper() + x[1:]
-
 def removeSpecialCharacters(text):
     """
     Removes special characters inserted by Word.
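The removed capfirst is close to, but not exactly, the built-in the commit message points at: str.capitalize() also lowercases everything after the first character, while capfirst left the rest of the string alone. A quick comparison (Python 2 syntax, matching the code above):

capfirst = lambda x: x and x[0].upper() + x[1:]  # the helper removed above

print 'foo'.capitalize()     # Foo    -- identical to capfirst('foo')
print capfirst('fooBAR')     # FooBAR -- only the first character changes
print 'fooBAR'.capitalize()  # Foobar -- capitalize() lowercases the rest

Callers that relied on mixed case being preserved after the first letter would behave differently after this change.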

test.sh (new executable file, 1 line)

@@ -0,0 +1 @@
+nosetests --with-doctest oxlib
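The new test.sh ties the commit together: nose's --with-doctest plugin discovers the '>>>' examples that were just moved into the docstrings and runs them as regular test cases, so this one-line script exercises the whole oxlib package. Run it from the repository root as ./test.sh (the file is marked executable).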