Oops, do not replace os.path

This commit is contained in:
j 2008-03-19 13:54:20 +00:00
parent 36a70bb365
commit cf955e4e9f
2 changed files with 8 additions and 7 deletions

View File

@@ -17,24 +17,24 @@ cache_base = "/var/cache/scrapeit/cache/"
cache_timeout = 30*24*60*60 # 30 days
def read_url(url):
cache_file = os.cache_file.join(cache_base, url.replace('http://',''))
cache_file = os.path.join(cache_base, url.replace('http://',''))
if cache_file.endswith('/'):
cache_file = "%sindex.html" % cache_file
if os.cache_file.isdir(cache_file):
cache_file = os.cache_file.join(cache_file, "index.html")
if os.path.isdir(cache_file):
cache_file = os.path.join(cache_file, "index.html")
ctime = os.stat(cache_file).st_ctime
now = time.mktime(time.localtime())
file_age = now-ctime
print cache_timeout-file_age
if file_age < cache_timeout and os.cache_file.exists(cache_file):
if file_age < cache_timeout and os.path.exists(cache_file):
f = open(cache_file)
data = f.read()
f.close()
return data
else:
data = utils.read_url(url)
folder = os.cache_file.dirname(cache_file)
if not os.cache_file.exists(folder):
folder = os.path.dirname(cache_file)
if not os.path.exists(folder):
os.makedirs(folder)
f = open(cache_file, 'w')
f.write(data)

View File

@@ -132,7 +132,8 @@ def getInfo(piratebayID):
try:
txt = read_url(url).decode('utf-8', 'replace')
except URLError, e:
if e.code == 404:
print e.errno
if e.errno == 404:
return None
title = re.compile('<title>(.*?) \(download torrent\) - TPB</title>').findall(txt)[0]
movie = dict(