ups, do not replace os.path
commit cf955e4e9f
parent 36a70bb365
2 changed files with 8 additions and 7 deletions
@@ -17,24 +17,24 @@ cache_base = "/var/cache/scrapeit/cache/"
 cache_timeout = 30*24*60*60 # 30 days
 
 def read_url(url):
-    cache_file = os.cache_file.join(cache_base, url.replace('http://',''))
+    cache_file = os.path.join(cache_base, url.replace('http://',''))
     if cache_file.endswith('/'):
         cache_file = "%sindex.html" % cache_file
-    if os.cache_file.isdir(cache_file):
-        cache_file = os.cache_file.join(cache_file, "index.html")
+    if os.path.isdir(cache_file):
+        cache_file = os.path.join(cache_file, "index.html")
     ctime = os.stat(cache_file).st_ctime
     now = time.mktime(time.localtime())
     file_age = now-ctime
     print cache_timeout-file_age
-    if file_age < cache_timeout and os.cache_file.exists(cache_file):
+    if file_age < cache_timeout and os.path.exists(cache_file):
         f = open(cache_file)
         data = f.read()
         f.close()
         return data
     else:
         data = utils.read_url(url)
-        folder = os.cache_file.dirname(cache_file)
-        if not os.cache_file.exists(folder):
+        folder = os.path.dirname(cache_file)
+        if not os.path.exists(folder):
             os.makedirs(folder)
         f = open(cache_file, 'w')
         f.write(data)
@@ -132,7 +132,8 @@ def getInfo(piratebayID):
     try:
         txt = read_url(url).decode('utf-8', 'replace')
     except URLError, e:
-        if e.code == 404:
+        print e.errno
+        if e.errno == 404:
             return None
     title = re.compile('<title>(.*?) \(download torrent\) - TPB</title>').findall(txt)[0]
     movie = dict(
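For reference, the caching scheme that read_url relies on simply maps each URL to a file path under cache_base; the sketch below isolates that mapping. It is illustrative only: the helper name cache_path is hypothetical, the cache_base value is copied from the hunk above, and it uses Python 3 syntax even though the project code itself is Python 2.

    import os

    cache_base = "/var/cache/scrapeit/cache/"

    def cache_path(url):
        # strip the scheme so host/path become a relative path under cache_base
        path = os.path.join(cache_base, url.replace('http://', ''))
        # a URL ending in '/' maps to that directory's index.html
        if path.endswith('/'):
            path = "%sindex.html" % path
        # a URL whose cached counterpart is already a directory also maps to index.html
        if os.path.isdir(path):
            path = os.path.join(path, "index.html")
        return path

    # e.g. cache_path("http://example.org/foo/")
    # -> "/var/cache/scrapeit/cache/example.org/foo/index.html"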