From cc27cdc29df50f1807a0b6e63c800269d95cbe45 Mon Sep 17 00:00:00 2001
From: j <0x006A@0x2620.org>
Date: Sun, 1 Jun 2008 13:48:01 +0200
Subject: [PATCH] migrate to oxutils

---
 oxdbarchive/cron.py  | 4 ----
 oxdbarchive/model.py | 6 +++---
 2 files changed, 3 insertions(+), 7 deletions(-)

diff --git a/oxdbarchive/cron.py b/oxdbarchive/cron.py
index 64bb6ce..7584e4d 100644
--- a/oxdbarchive/cron.py
+++ b/oxdbarchive/cron.py
@@ -25,10 +25,6 @@ def extractNew():
     for f in ArchiveFile.select(ArchiveFile.q.extracted == False):
         f.extractAll()
 
-def cleanScrapeitCache():
-    #remove files older than 30 days from scrapeit cache
-    os.system("""find /var/cache/scrapeit/ -type f -ctime +30 -exec rm '{}' \;""")
-
 def cleanClipMovieCache():
     cache = os.path.abspath('oxdb/cache/mini/')
     for d, p, files in os.walk(cache):
diff --git a/oxdbarchive/model.py b/oxdbarchive/model.py
index afce4ba..943db0e 100644
--- a/oxdbarchive/model.py
+++ b/oxdbarchive/model.py
@@ -18,7 +18,7 @@ import shutil
 import socket
 
 import simplejson
-from scrapeit.utils import read_url
+from oxutils.net import getUrl
 
 import subtitles
 import cache
@@ -56,7 +56,7 @@ class Archive(SQLObject):
         url = "%s/%s?md5sum=%s" % (self.baseUrlFrontend, action, md5sum)
         #print url
         try:
-            result = read_url(url)
+            result = getUrl(url)
         except urllib2.HTTPError, e:
             print e.code
             print e.read()
@@ -210,7 +210,7 @@ class Archive(SQLObject):
     def syncFrontend(self):
         dto = socket.getdefaulttimeout()
         socket.setdefaulttimeout(256)
-        data = read_url("%s/list" % self.baseUrlFrontend)
+        data = getUrl("%s/list" % self.baseUrlFrontend)
         md5sums = simplejson.loads(data)['files']
         socket.setdefaulttimeout(dto)
         for md5sum in md5sums:
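
Note (not part of the patch): the change treats oxutils.net.getUrl as a drop-in
replacement for scrapeit.utils.read_url, i.e. a function that takes a URL and
returns the response body as a string, raising urllib2.HTTPError on failure just
as the existing except clause in model.py expects. A minimal sketch of that call
pattern, assuming exactly this behaviour; fetchFrontend is a hypothetical helper
for illustration, not code from this repository:

    # Python 2, matching the code above; assumes getUrl(url) returns the body as a str
    import urllib2
    from oxutils.net import getUrl

    def fetchFrontend(url):
        try:
            return getUrl(url)
        except urllib2.HTTPError, e:
            # mirror the patch's error handling: print status and body, return nothing
            print e.code
            print e.read()
            return None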