and do not fail for new files

j 2008-03-19 13:58:48 +00:00
parent cf955e4e9f
commit 963d95ec0d

@@ -22,24 +22,23 @@ def read_url(url):
         cache_file = "%sindex.html" % cache_file
     if os.path.isdir(cache_file):
         cache_file = os.path.join(cache_file, "index.html")
-    ctime = os.stat(cache_file).st_ctime
-    now = time.mktime(time.localtime())
-    file_age = now-ctime
-    print cache_timeout-file_age
-    if file_age < cache_timeout and os.path.exists(cache_file):
-        f = open(cache_file)
-        data = f.read()
-        f.close()
-        return data
-    else:
-        data = utils.read_url(url)
-        folder = os.path.dirname(cache_file)
-        if not os.path.exists(folder):
-            os.makedirs(folder)
-        f = open(cache_file, 'w')
-        f.write(data)
-        f.close()
-        return data
+    if os.path.exists(cache_file):
+        ctime = os.stat(cache_file).st_ctime
+        now = time.mktime(time.localtime())
+        file_age = now-ctime
+        if file_age < cache_timeout:
+            f = open(cache_file)
+            data = f.read()
+            f.close()
+            return data
+    data = utils.read_url(url)
+    folder = os.path.dirname(cache_file)
+    if not os.path.exists(folder):
+        os.makedirs(folder)
+    f = open(cache_file, 'w')
+    f.write(data)
+    f.close()
+    return data
 
 def read_url_utf8(url):
     data = read_url(url)
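
The point of the patch: the old code called os.stat(cache_file) unconditionally, so the very first request for a URL (no cache file on disk yet) raised OSError before the existence check on the read path was ever reached. The new code wraps the freshness test in an os.path.exists() guard, drops the debug print, and lets cache misses fall through to a normal fetch and write. A minimal, self-contained sketch of the resulting logic, modernized to Python 3 for illustration (read_url_cached and CACHE_TIMEOUT are hypothetical names; the module's own utils.read_url and cache_timeout are defined outside this hunk):

    import os
    import time
    import urllib.request

    # Illustrative constant; in the patched module the timeout comes
    # from a cache_timeout variable defined outside this hunk.
    CACHE_TIMEOUT = 60 * 60

    def read_url_cached(url, cache_file):
        # Only stat the cache file if it exists; a URL that was never
        # cached before falls through to a fresh fetch instead of
        # raising OSError from os.stat() on a missing path.
        if os.path.exists(cache_file):
            file_age = time.time() - os.stat(cache_file).st_ctime
            if file_age < CACHE_TIMEOUT:
                with open(cache_file, 'rb') as f:
                    return f.read()
        data = urllib.request.urlopen(url).read()
        folder = os.path.dirname(cache_file)
        if folder and not os.path.exists(folder):
            os.makedirs(folder)
        with open(cache_file, 'wb') as f:
            f.write(data)
        return data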